per_node_diag = [{'ns_1@10.2.1.100', [{version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {manifest, ["bucket_engine_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r", "curl-7.21.1-w64_patched.tar.gz\r", "ep-engine_1.6.5r_4_g9d25ede-MINGW32_NT-6.0.i686.tar.gz\r", "libconflate_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r", "libevent-2.0.7-rc.tar.gz\r", "libmemcached-0.41_trond-norbye_mingw32-revno895.tar.gz\r", "libvbucket_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r", "membase-cli_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r", "memcached_1.4.4_359_g06c7d3b-MINGW32_NT-6.0.i686.tar.gz\r", "moxi_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r", "ns_server_1.6.5r.tar.gz\r", "pthreads-w64-2-8-0-release.tar.gz\r", "vbucketmigrator_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r", "wallace_1.6.5r-2-gc6cf01c-win64-201012280140\r"]}, {config, [{{node,'ns_1@10.2.1.101',ns_log}, [{filename, "c:/Program Files/Membase/Server/data/ns_1/ns_log"}]}, {{node,'ns_1@10.2.1.102',memcached}, [{port,11210}, {dbdir,"c:/Program Files/Membase/Server/data/ns_1"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"./bin/bucket_engine/bucket_engine.so"}, {engines, [{membase, [{engine,"bin/ep_engine/ep.so"}, {initfile,"priv/init.sql"}]}, {memcached, [{engine,"bin/memcached/default_engine.so"}]}]}, {verbosity,[]}]}, {otp, [{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]}, {cookie,pmqchiglstnppkwf}]}, {memory_quota,3268}, {{node,'ns_1@10.2.1.102',ns_log}, [{filename, "c:/Program Files/Membase/Server/data/ns_1/ns_log"}]}, {{node,'ns_1@10.2.1.100',membership},active}, {rebalance_status, {none, <<"Rebalance failed. See logs for detailed reason. You can try rebalance again.">>}}, {{node,'ns_1@10.2.1.101',membership},active}, {rest_creds, [{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]}, {creds, [{"Administrator",[{password,'filtered-out'}]}]}]}, {buckets, [{'_vclock',[{'ns_1@10.2.1.100',{9,63461309965}}]}, {configs, [{"default", [{num_replicas,1}, {ram_quota,3426746368}, {auth_type,sasl}, {sasl_password,[]}, {type,membase}, {num_vbuckets,1024}, {ht_size,3079}, {tap_keepalive,0}, {tap_noop_interval,20}, {max_txn_size,1000}, {ht_locks,5}, {servers, ['ns_1@10.2.1.100','ns_1@10.2.1.101', 'ns_1@10.2.1.102']}, {map, [['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], 
['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], 
['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], 
['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101']]}]}]}]}, {port_servers, [{moxi,"./bin/moxi/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR", {"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,stderr_to_stdout,stream]}, {memcached,"./bin/memcached/memcached", ["-X","./bin/memcached/stdin_term_handler.so","-p", {"~B",[port]}, "-E","./bin/bucket_engine/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,stream]}]}, {alerts, [{email,[]}, {email_alerts,false}, {email_server, [{user,undefined}, {pass,'filtered-out'}, {addr,undefined}, {port,undefined}, {encrypt,false}]}, {alerts, [server_down,server_unresponsive,server_up, server_joined,server_left,bucket_created, bucket_deleted,bucket_auth_failed]}]}, {nodes_wanted, 
[{'_vclock',[{'ns_1@10.2.1.100',{2,63461308289}}]}, 'ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']}, {rest, [{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]}, {port,8091}]}, {{node,'ns_1@10.2.1.102',membership},active}, {{node,'ns_1@10.2.1.100',isasl}, [{path, "c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}]}, {{node,'ns_1@10.2.1.101',isasl}, [{path, "c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}]}, {{node,'ns_1@10.2.1.102',isasl}, [{path, "c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}]}, {moxi,[{port,11211},{verbosity,[]}]}, {replication,[{enabled,true}]}, {{node,'ns_1@10.2.1.100',memcached}, [{'_vclock',[{'ns_1@10.2.1.100',{1,63461307259}}]}, {dbdir,"c:/Program Files/Membase/Server/data/ns_1"}, {port,11210}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"./bin/bucket_engine/bucket_engine.so"}, {engines, [{membase, [{engine,"bin/ep_engine/ep.so"}, {initfile,"priv/init.sql"}]}, {memcached, [{engine,"bin/memcached/default_engine.so"}]}]}, {verbosity,[]}]}, {{node,'ns_1@10.2.1.101',memcached}, [{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]}, {dbdir,"c:/Program Files/Membase/Server/data/ns_1"}, {port,11210}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"./bin/bucket_engine/bucket_engine.so"}, {engines, [{membase, [{engine,"bin/ep_engine/ep.so"}, {initfile,"priv/init.sql"}]}, {memcached, [{engine,"bin/memcached/default_engine.so"}]}]}, {verbosity,[]}]}, {{node,'ns_1@10.2.1.100',ns_log}, [{filename, "c:/Program Files/Membase/Server/data/ns_1/ns_log"}]}]}, {basic_info, [{version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,3299}, {memory_data,{4284698624,4184846336,{<0.299.0>,6656756}}}, {disk_data, [{"C:\\",48162864,60}, {"D:\\",51279476,0}, {"G:\\",34724465,17}]}]}, {processes, [{<0.0.0>, [{registered_name,init}, {status,waiting}, {initial_call,{otp_ring0,start,2}}, {backtrace, <<"Program counter: 0x00ecfcb8 (init:loop/1 + 20)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a83808 Return addr 0x00bdc194 ()\ny(0) {state,[{'-root',[<<37 bytes>>]},{'-progname',[<<3 bytes>>]},{'-home',[<<10 bytes>>]},{'-name',[<<15 bytes>>]},{'-pa',[<<18 bytes>>]},{'-pa',[<<29 bytes>>]},{'-pa',[<<32 bytes>>]},{'-pa',[<<46 bytes>>]},{'-pa',[<<32 bytes>>]},{'-setcookie',[<<8 bytes>>]},{'-ns_server',[<<19 bytes>>,<<6 bytes>>]},{'-ns_server',[<<24 bytes>>,<<8 bytes>>]},{'-ns_server',[<<24 bytes>>,<<2 bytes>>]},{'-kernel',[<<20 bytes>>,<<5 bytes>>,<<20 bytes>>,<<5 bytes>>]},{'-ns_server',[<<14 bytes>>,<<32 bytes>>]}],[],[[ns_bootstrap,override_resolver]],[{application_controller,<0.7.0>},{error_logger,<0.6.0>},{erl_prim_loader,<0.3.0>}],<0.2.0>,{started,started},{\"OTP APN 181 01\",\"R13B03\"},[],[]}\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,544}]}, {heap_size,2584}, {total_heap_size,4181}, {links,[<0.6.0>,<0.7.0>,<0.3.0>]}, {memory,17140}, {message_queue_len,0}, {reductions,130426}, {trap_exit,true}]}, {<0.3.0>, [{registered_name,erl_prim_loader}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x00effd88 (erl_prim_loader:loop/3 + 92)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05607ce4 Return addr 0x00bdc194 ()\ny(0) []\ny(1) 
[\"bin/ns_server/deps/gen_smtp/ebin\",\"bin/ns_server/deps/menelaus/deps/mochiweb/ebin\",\"bin/ns_server/deps/menelaus/ebin\",\"bin/ns_server/deps/emoxi/ebin\",\"bin/ns_server/ebin\",\"C:\\PROGRA~1\\Membase\\Server\\bin\\erlang/lib/kernel-2.13.4/ebin\",\"C:\\PROGRA~1\\Membase\\Server\\bin\\erlang/lib/stdlib-1.16.4/ebin\"]\ny(2) <0.2.0>\ny(3) {state,efile,[],none,#Port<0.1>,infinity,undefined,true,{prim_state,false,undefined,undefined}}\ny(4) infinity\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,528}]}, {heap_size,4181}, {total_heap_size,15127}, {links,[#Port<0.1>,<0.0.0>]}, {memory,60904}, {message_queue_len,0}, {reductions,1327391}, {trap_exit,true}]}, {<0.6.0>, [{registered_name,error_logger}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00ef4db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05b0b39c Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,ns_log_mf_h,false,{state,\"logs\",10485760,10,2856722,1,{file_descriptor,prim_file,{#Port<0.1578>,688}},[],#Fun},false},{handler,error_logger,false,[],false}]\ny(3) error_logger\ny(4) <0.2.0>\n\n0x05b0b3b4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,233}]}, {heap_size,4181}, {total_heap_size,10946}, {links,[<0.0.0>,<0.31.0>,#Port<0.1578>]}, {memory,44260}, {message_queue_len,0}, {reductions,1290627}, {trap_exit,true}]}, {<0.7.0>, [{registered_name,application_controller}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a89850 Return addr 0x00bdc194 ()\ny(0) []\ny(1) infinity\ny(2) application_controller\ny(3) {state,[],[],[],[{inets,<0.427.0>},{ns_server,<0.59.0>},{mnesia,<0.113.0>},{os_mon,<0.50.0>},{sasl,<0.39.0>},{stdlib,undefined},{kernel,<0.9.0>}],[],[{inets,temporary},{ns_server,temporary},{mnesia,temporary},{os_mon,temporary},{sasl,temporary},{stdlib,permanent},{kernel,permanent}],[],[]}\ny(4) application_controller\ny(5) <0.2.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,32}]}, {heap_size,4181}, {total_heap_size,21892}, {links, [<0.50.0>,<0.113.0>,<0.427.0>,<0.59.0>,<0.9.0>, <0.39.0>,<0.0.0>]}, {memory,88124}, {message_queue_len,0}, {reductions,42916}, {trap_exit,true}]}, {<0.9.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x032afdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f7d4d4 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) 
{state,<0.10.0>,{appl_data,kernel,[application_controller,erl_reply,auth,boot_server,code_server,disk_log_server,disk_log_sup,erl_prim_loader,error_logger,file_server_2,fixtable_server,global_group,global_name_server,heart,init,kernel_config,kernel_sup,net_kernel,net_sup,rex,user,os_server,ddll_server,erl_epmd,inet_db,pg2],undefined,{kernel,[]},[application,application_controller,application_master,application_starter,auth,code,packages,code_server,dist_util,erl_boot_server,erl_distribution,erl_reply,error_handler,error_logger,file,file_server,file_io_server,global,global_group,global_search,group,heart,hipe_unified_loader,inet6_tcp,inet6_tcp_dist,inet6_udp,inet6_sctp,inet_config,inet_hosts,inet_gethost_native,inet_tcp_dist,kernel,kernel_config,net,net_adm,net_kernel,os,ram_file,rpc,user,user_drv,user_sup,disk_log,disk_log_1,disk_log_server,disk_log_sup,dist_ac,erl_ddll,erl_epmd,erts_debug,gen_tcp,gen_udp,gen_sctp,inet,inet_db,inet_dns,inet_parse,inet_res,inet_tcp,inet_udp,inet_sctp,pg2,seq_trace,standard_error,wrap_log_reader],[],infinity,infinity},[],0,<0.0.0>}\ny(2) <0.7.0>\n\n0x00f7d4e4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.7.0>,<0.10.0>]}, {memory,3472}, {message_queue_len,0}, {reductions,44}, {trap_exit,true}]}, {<0.10.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032b10fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f7f8d8 Return addr 0x00bdc194 ()\ny(0) []\ny(1) kernel\ny(2) <0.11.0>\ny(3) <0.9.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.9.0>,<0.11.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,72}, {trap_exit,true}]}, {<0.11.0>, [{registered_name,kernel_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f967e0 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,kernel_sup},one_for_all,[{child,<0.34.0>,kernel_safe_sup,{supervisor,start_link,[{local,kernel_safe_sup},kernel,safe]},permanent,infinity,supervisor,[kernel]},{child,<0.33.0>,kernel_config,{kernel_config,start_link,[]},permanent,2000,worker,[kernel_config]},{child,<0.29.0>,user,{user_sup,start,[]},temporary,2000,supervisor,[user_sup]},{child,<0.27.0>,standard_error,{standard_error,start_link,[]},temporary,2000,supervisor,[user_sup]},{child,<0.26.0>,code_server,{code,start_link,[]},permanent,2000,worker,[code]},{child,<0.25.0>,file_server_2,{file_server,start_link,[]},permanent,2000,worker,[file,file_server,file_io_server,prim_file]},{child,<0.24.0>,global_group,{global_group,start_link,[]},permanent,2000,worker,[global_group]},{child,<0.18.0>,net_sup,{erl_distribution,start_link,[]},permanent,infinity,supervisor,[erl_distribution]},{child,<0.17.0>,inet_db,{inet_db,start_link,[]},permanent,2000,worker,[inet_db]},{child,<0.13.0>,global_name_server,{global,start_link,[]},permanent,2000,worker,[global]},{child,<0.12.0>,rex,{rpc,start_link,[]},permanent,2000,worker,[rpc]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,1,[],kernel,[]}\ny(4) 
kernel_sup\ny(5) <0.10.0>\n\n0x00f967fc Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,4181}, {total_heap_size,8362}, {links, [<0.26.0>,<0.29.0>,<0.33.0>,<0.34.0>,<0.27.0>, <0.17.0>,<0.24.0>,<0.25.0>,<0.18.0>,<0.12.0>, <0.13.0>,<0.10.0>]}, {memory,34104}, {message_queue_len,0}, {reductions,3131}, {trap_exit,true}]}, {<0.12.0>, [{registered_name,rex}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05b0723c Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) rpc\ny(3) {1,{<0.12007.0>,{<0.12006.0>,{#Ref<0.0.0.246339>,'ns_1@10.2.1.100'}},nil,nil}}\ny(4) rex\ny(5) <0.11.0>\n\n0x05b07258 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,344}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.11.0>]}, {memory,12360}, {message_queue_len,0}, {reductions,57712}, {trap_exit,true}]}, {<0.13.0>, [{registered_name,global_name_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d2a8b8 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) global\ny(3) {state,true,['ns_1@10.2.1.101','ns_1@10.2.1.102'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],[],[],'nonode@nohost',<0.14.0>,<0.15.0>,<0.16.0>,no_trace,false}\ny(4) global_name_server\ny(5) <0.11.0>\n\n0x00d2a8d4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,500}]}, {heap_size,1597}, {total_heap_size,1974}, {links,[<0.14.0>,<0.16.0>,<0.15.0>,<0.11.0>]}, {memory,8472}, {message_queue_len,0}, {reductions,163826}, {trap_exit,true}]}, {<0.14.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x032eb31c (global:loop_the_locker/1 + 588)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05b3ece4 Return addr 0x032eb0b0 (global:init_the_locker/1 + 192)\ny(0) {multi,[],[],['ns_1@10.2.1.102','ns_1@10.2.1.101'],'ns_1@10.2.1.102',false,false}\ny(1) infinity\n\n0x05b3ecf0 Return addr 0x00bdc194 ()\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,5}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<0.13.0>]}, {memory,5832}, {message_queue_len,0}, {reductions,760}, {trap_exit,true}]}, {<0.15.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x032ef3c8 (global:collect_deletions/2 + 76)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f7e5d4 Return addr 0x032ef348 (global:loop_the_deleter/1 + 36)\ny(0) infinity\ny(1) []\ny(2) <0.13.0>\n\n0x00f7e5e4 Return addr 0x00bdc194 ()\ny(0) <0.13.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.13.0>]}, {memory,1308}, {message_queue_len,0}, {reductions,4}, {trap_exit,false}]}, {<0.16.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x032ef4e8 (global:loop_the_registrar/0 + 12)\nCP: 0x00000000 (invalid)\narity = 
0\n\n0x0466ea70 Return addr 0x00bdc194 ()\ny(0) []\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.13.0>]}, {memory,4324}, {message_queue_len,0}, {reductions,249}, {trap_exit,false}]}, {<0.17.0>, [{registered_name,inet_db}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04b15f10 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) inet_db\ny(3) {state,inet_db,inet_cache,inet_hosts_byname,inet_hosts_byaddr,inet_hosts_file_byname,inet_hosts_file_byaddr,#Ref<0.0.0.8>}\ny(4) inet_db\ny(5) <0.11.0>\n\n0x04b15f2c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,24}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.11.0>]}, {memory,3452}, {message_queue_len,0}, {reductions,2423}, {trap_exit,true}]}, {<0.18.0>, [{registered_name,net_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f77878 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,net_sup},one_for_all,[{child,<0.21.0>,net_kernel,{net_kernel,start_link,[['ns_1@10.2.1.100',longnames]]},permanent,2000,worker,[net_kernel]},{child,<0.20.0>,auth,{auth,start_link,[]},permanent,2000,worker,[auth]},{child,<0.19.0>,erl_epmd,{erl_epmd,start_link,[]},permanent,2000,worker,[erl_epmd]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,1,[],erl_distribution,['ns_1@10.2.1.100',longnames]}\ny(4) net_sup\ny(5) <0.11.0>\n\n0x00f77894 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,377}, {total_heap_size,987}, {links,[<0.19.0>,<0.20.0>,<0.21.0>,<0.11.0>]}, {memory,4444}, {message_queue_len,0}, {reductions,265}, {trap_exit,true}]}, {<0.19.0>, [{registered_name,erl_epmd}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f633c8 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) erl_epmd\ny(3) {state,#Port<0.473>,21100,ns_1}\ny(4) erl_epmd\ny(5) <0.18.0>\n\n0x00f633e4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.18.0>,#Port<0.473>]}, {memory,1388}, {message_queue_len,0}, {reductions,135}, {trap_exit,false}]}, {<0.20.0>, [{registered_name,auth}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05021d10 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) auth\ny(3) {state,pmqchiglstnppkwf,12}\ny(4) auth\ny(5) <0.18.0>\n\n0x05021d2c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, 
[{fullsweep_after,65535},{minor_gcs,26}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.18.0>]}, {memory,3452}, {message_queue_len,0}, {reductions,3678}, {trap_exit,true}]}, {<0.21.0>, [{registered_name,net_kernel}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0618ff90 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) net_kernel\ny(3) {state,'ns_1@10.2.1.100','ns_1@10.2.1.100',longnames,{tick,<0.23.0>,15000},7000,sys_dist,[{<0.5157.0>,'ns_1@10.2.1.102'},{<0.449.0>,'ns_1@10.2.1.101'}],[],[{listen,#Port<0.460>,<0.22.0>,{net_address,{{0,0,0,0},21100},\"WIN-U2A76A2MES1\",tcp,inet},inet_tcp_dist}],[],0,all}\ny(4) net_kernel\ny(5) <0.18.0>\n\n0x0618ffac Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,114}]}, {heap_size,1597}, {total_heap_size,1974}, {links, [<0.23.0>,<0.449.0>,<0.5157.0>,<0.18.0>,<0.22.0>, #Port<0.460>]}, {memory,8448}, {message_queue_len,0}, {reductions,33199}, {trap_exit,true}]}, {<0.22.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{inet_tcp_dist,accept_loop,2}}, {backtrace, <<"Program counter: 0x00ee92d8 (prim_inet:accept0/2 + 92)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d28044 Return addr 0x03318894 (inet_tcp:accept/1 + 20)\ny(0) 11629\ny(1) #Port<0.460>\n\n0x00d28050 Return addr 0x03312550 (inet_tcp_dist:accept_loop/2 + 48)\ny(0) []\n\n0x00d28058 Return addr 0x00bdc194 ()\ny(0) []\ny(1) #Port<0.460>\ny(2) <0.21.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,139}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.21.0>]}, {memory,12260}, {message_queue_len,0}, {reductions,126016}, {trap_exit,false}]}, {<0.23.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{net_kernel,ticker,2}}, {backtrace, <<"Program counter: 0x03322384 (net_kernel:ticker_loop/2 + 28)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f80438 Return addr 0x00bdc194 ()\ny(0) 15000\ny(1) <0.21.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.21.0>]}, {memory,1308}, {message_queue_len,0}, {reductions,441}, {trap_exit,false}]}, {<0.24.0>, [{registered_name,global_group}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f89450 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) global_group\ny(3) {state,no_conf,true,[],[],[],[],[],'ns_1@10.2.1.100',[],normal,normal}\ny(4) global_group\ny(5) <0.11.0>\n\n0x00f8946c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.11.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,76}, {trap_exit,true}]}, {<0.25.0>, [{registered_name,file_server_2}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04ac92b0 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) file_server\ny(3) #Port<0.496>\ny(4) file_server_2\ny(5) 
<0.11.0>\n\n0x04ac92cc Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,4406}]}, {heap_size,4181}, {total_heap_size,8362}, {links,[#Port<0.496>,<0.11.0>]}, {memory,33904}, {message_queue_len,0}, {reductions,4147997}, {trap_exit,true}]}, {<0.26.0>, [{registered_name,code_server}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x0334727c (code_server:loop/1 + 64)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x050b90c0 Return addr 0x00bdc194 ()\ny(0) {state,<0.11.0>,\"c:/PROGRA~1/Membase/Server/bin/erlang\",[\"bin/ns_server/deps/gen_smtp/ebin\",\"bin/ns_server/deps/menelaus/deps/mochiweb/ebin\",\"bin/ns_server/deps/menelaus/ebin\",\"bin/ns_server/ebin\",\".\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/kernel-2.13.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/stdlib-1.16.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/xmerl-1.2.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/webtool-0.8.5/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/typer-0.1.7.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/tv-2.1.4.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/tools-2.6.5/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/test_server-3.3.5/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/syntax_tools-1.6.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/ssl-3.10.7/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/ssh-1.1.7/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/snmp-4.15/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/sasl-2.1.8/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/runtime_tools-1.8.2/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/reltool-0.5.2/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/public_key-0.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/pman-2.7.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/percept-0.8.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/parsetools-2.0.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/otp_mibs-1.0.6/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/os_mon-2.2.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/odbc-2.10.6/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/observer-0.9.8.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/mnesia-4.4.12/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/inviso-0.6.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/inets-5.2/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/hipe-3.7.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/eunit-2.1.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/et-1.3.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/erts-5.7.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/erl_interface-3.6.4\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/erl_docgen-0.1\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/dialyzer-2.1.0/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/debugger-3.2.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/crypto-1.6.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/compiler-4.6.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/common_test-1.4.6/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/appmon-2.1.10.2/ebin\",\"c:/Program Files/Membase/Server/bin/ns_server/deps/menelaus/deps/erlwsh/ebin\"],4111,8208,no_cache,interactive,[]}\ny(1) <0.11.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,421}]}, {heap_size,6765}, 
{total_heap_size,24476}, {links,[<0.11.0>]}, {memory,98280}, {message_queue_len,0}, {reductions,262009}, {trap_exit,true}]}, {<0.27.0>, [{registered_name,standard_error_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f88e78 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor_bridge\ny(3) {state,standard_error,<0.28.0>,<0.28.0>,{local,standard_error_sup}}\ny(4) standard_error_sup\ny(5) <0.11.0>\n\n0x00f88e94 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.11.0>,<0.28.0>]}, {memory,1388}, {message_queue_len,0}, {reductions,40}, {trap_exit,true}]}, {<0.28.0>, [{registered_name,standard_error}, {status,waiting}, {initial_call,{standard_error,server,2}}, {backtrace, <<"Program counter: 0x0333196c (standard_error:server_loop/1 + 20)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8dd34 Return addr 0x00bdc194 ()\ny(0) #Port<0.792>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.27.0>,#Port<0.792>]}, {memory,1388}, {message_queue_len,0}, {reductions,7}, {trap_exit,true}]}, {<0.29.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8ad50 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor_bridge\ny(3) {state,user_sup,<0.31.0>,<0.31.0>,{<0.29.0>,user_sup}}\ny(4) <0.29.0>\ny(5) <0.11.0>\n\n0x00f8ad6c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,1597}, {total_heap_size,1597}, {links,[<0.11.0>,<0.31.0>]}, {memory,6844}, {message_queue_len,0}, {reductions,166}, {trap_exit,true}]}, {<0.31.0>, [{registered_name,user}, {status,waiting}, {initial_call,{user,server,2}}, {backtrace, <<"Program counter: 0x0336dd88 (user:get_chars/8 + 176)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03088bb8 Return addr 0x0336aea0 (user:do_io_request/5 + 56)\ny(0) []\ny(1) []\ny(2) []\ny(3) unicode\ny(4) start\ny(5) {[],[]}\ny(6) #Port<0.830>\ny(7) {erl_scan,tokens,[1]}\ny(8) get_until\ny(9) io_lib\ny(10) [40,\"ns_1@10.2.1.100\",41,\"1\",62,32]\n\n0x03088be8 Return addr 0x0336adc8 (user:server_loop/2 + 784)\ny(0) #Port<0.830>\ny(1) <0.31.0>\ny(2) <0.48.0>\n\n0x03088bf8 Return addr 0x0336a894 (user:catch_loop/3 + 56)\ny(0) #Port<0.830>\n\n0x03088c00 Return addr 0x00bdc194 ()\ny(0) <0.32.0>\ny(1) #Port<0.830>\ny(2) Catch 0x0336a894 (user:catch_loop/3 + 56)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,22}]}, {heap_size,2584}, {total_heap_size,6765}, {links,[<0.29.0>,<0.32.0>,#Port<0.830>,<0.6.0>]}, {memory,27596}, {message_queue_len,0}, {reductions,30017}, {trap_exit,true}]}, {<0.32.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03f42924 (shell:get_command1/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f87790 Return addr 0x03f422ec (shell:server_loop/7 + 148)\ny(0) []\ny(1) 12305\ny(2) []\ny(3) <0.47.0>\ny(4) <0.48.0>\n\n0x00f877a8 
Return addr 0x00bdc194 ()\ny(0) []\ny(1) []\ny(2) 1\ny(3) 20\ny(4) 20\ny(5) []\ny(6) 12305\ny(7) []\ny(8) 0\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,4}]}, {heap_size,2584}, {total_heap_size,20295}, {links,[<0.47.0>,<0.48.0>,<0.31.0>]}, {memory,81656}, {message_queue_len,0}, {reductions,5170}, {trap_exit,true}]}, {<0.33.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8d968 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) kernel_config\ny(3) []\ny(4) <0.33.0>\ny(5) <0.11.0>\n\n0x00f8d984 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.11.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,268}, {trap_exit,true}]}, {<0.34.0>, [{registered_name,kernel_safe_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f615c4 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,kernel_safe_sup},one_for_one,[{child,<0.135.0>,dets,{dets_server,start_link,[]},permanent,2000,worker,[dets_server]},{child,<0.134.0>,dets_sup,{dets_sup,start_link,[]},permanent,1000,supervisor,[dets_sup]},{child,<0.130.0>,disk_log_server,{disk_log_server,start_link,[]},permanent,2000,worker,[disk_log_server]},{child,<0.129.0>,disk_log_sup,{disk_log_sup,start_link,[]},permanent,1000,supervisor,[disk_log_sup]},{child,<0.57.0>,timer_server,{timer,start_link,[]},permanent,1000,worker,[timer]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},4,3600,[],kernel,safe}\ny(4) kernel_safe_sup\ny(5) <0.11.0>\n\n0x00f615e0 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,610}, {total_heap_size,987}, {links, [<0.130.0>,<0.134.0>,<0.135.0>,<0.57.0>,<0.129.0>, <0.11.0>]}, {memory,4484}, {message_queue_len,0}, {reductions,388}, {trap_exit,true}]}, {<0.39.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x032afdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f882cc Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.40.0>,{appl_data,sasl,[sasl_sup,alarm_handler,overload,release_handler],undefined,{sasl,[]},[sasl,alarm_handler,format_lib_supp,misc_supp,overload,rb,rb_format_supp,release_handler,release_handler_1,erlsrv,sasl_report,sasl_report_tty_h,sasl_report_file_h,systools,systools_make,systools_rc,systools_relup,systools_lib],[],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x00f882dc Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.7.0>,<0.40.0>]}, {memory,1388}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<0.40.0>, [{registered_name,[]}, {status,waiting}, 
{initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032b10fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8b110 Return addr 0x00bdc194 ()\ny(0) {state,tty,undefined}\ny(1) sasl\ny(2) <0.41.0>\ny(3) <0.39.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.39.0>,<0.41.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,70}, {trap_exit,true}]}, {<0.41.0>, [{registered_name,sasl_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f87f10 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,sasl_sup},one_for_one,[{child,<0.45.0>,release_handler,{release_handler,start_link,[]},permanent,2000,worker,[]},{child,<0.42.0>,sasl_safe_sup,{supervisor,start_link,[{local,sasl_safe_sup},sasl,safe]},permanent,infinity,supervisor,[sasl]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,1,[],sasl,[]}\ny(4) sasl_sup\ny(5) <0.40.0>\n\n0x00f87f2c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.42.0>,<0.45.0>,<0.40.0>]}, {memory,2916}, {message_queue_len,0}, {reductions,158}, {trap_exit,true}]}, {<0.42.0>, [{registered_name,sasl_safe_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8bc10 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,sasl_safe_sup},one_for_one,[{child,<0.44.0>,overload,{overload,start_link,[]},permanent,2000,worker,[overload]},{child,<0.43.0>,alarm_handler,{alarm_handler,start_link,[]},permanent,2000,worker,dynamic}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},4,3600,[],sasl,safe}\ny(4) sasl_safe_sup\ny(5) <0.41.0>\n\n0x00f8bc2c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.43.0>,<0.44.0>,<0.41.0>]}, {memory,2916}, {message_queue_len,0}, {reductions,174}, {trap_exit,true}]}, {<0.43.0>, [{registered_name,alarm_handler}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00ef4db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f87b64 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,alarm_handler,false,[{system_memory_high_watermark,[]}],false}]\ny(3) alarm_handler\ny(4) <0.42.0>\n\n0x00f87b7c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.42.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,43}, {trap_exit,true}]}, {<0.44.0>, [{registered_name,overload}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program 
counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8b860 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) overload\ny(3) {state,0,0,8.000000e-001,281,1.000000e-001,{0,0},clear}\ny(4) overload\ny(5) <0.42.0>\n\n0x00f8b87c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.42.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,39}, {trap_exit,false}]}, {<0.45.0>, [{registered_name,release_handler}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f81774 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) release_handler\ny(3) {state,[],\"C:\\PROGRA~1\\Membase\\Server\\bin\\erlang\",\"c:/PROGRA~1/Membase/Server/bin/erlang/releases\",[{release,\"OTP APN 181 01\",\"R13B03\",undefined,[],permanent}],undefined,{no_check,\"c:/PROGRA~1/Membase/Server/bin/erlang/bin/start\"},false,false,false,[]}\ny(4) release_handler\ny(5) <0.41.0>\n\n0x00f81790 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,5}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.41.0>]}, {memory,4384}, {message_queue_len,0}, {reductions,1249}, {trap_exit,false}]}, {<0.47.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03f45484 (shell:eval_loop/3 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8e0d4 Return addr 0x00bdc194 ()\ny(0) []\ny(1) []\ny(2) 12305\ny(3) []\ny(4) <0.32.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.32.0>]}, {memory,1308}, {message_queue_len,0}, {reductions,8}, {trap_exit,false}]}, {<0.48.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03fa72c8 (io:wait_io_mon_reply/2 + 28)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8e484 Return addr 0x03fa6c0c (io:parse_erl_exprs/3 + 100)\ny(0) #Ref<0.0.0.44>\ny(1) <0.31.0>\n\n0x00f8e490 Return addr 0x03f4c180 (shell:'-get_command/5-fun-0-'/1 + 20)\ny(0) []\n\n0x00f8e498 Return addr 0x00bdc194 ()\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.32.0>]}, {memory,1348}, {message_queue_len,0}, {reductions,14}, {trap_exit,false}]}, {<0.50.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x032afdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f84d5c Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.51.0>,{appl_data,os_mon,[os_mon_sup,os_mon_sysinfo,disksup,memsup,cpu_sup,os_sup_server],undefined,{os_mon,[]},[os_mon,os_mon_mib,os_sup,disksup,memsup,cpu_sup,os_mon_sysinfo,nteventlog],[],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x00f84d6c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, 
{links,[<0.7.0>,<0.51.0>]}, {memory,1388}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<0.51.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032b10fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f83090 Return addr 0x00bdc194 ()\ny(0) []\ny(1) os_mon\ny(2) <0.52.0>\ny(3) <0.50.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.50.0>,<0.52.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,40}, {trap_exit,true}]}, {<0.52.0>, [{registered_name,os_mon_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f9a5b8 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,os_mon_sup},one_for_one,[{child,<0.55.0>,memsup,{memsup,start_link,[]},permanent,2000,worker,[memsup]},{child,<0.54.0>,disksup,{disksup,start_link,[]},permanent,2000,worker,[disksup]},{child,<0.53.0>,os_mon_sysinfo,{os_mon_sysinfo,start_link,[]},permanent,2000,worker,[os_mon_sysinfo]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},5,3600,[],os_mon,[]}\ny(4) os_mon_sup\ny(5) <0.51.0>\n\n0x00f9a5d4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.53.0>,<0.54.0>,<0.55.0>,<0.51.0>]}, {memory,3512}, {message_queue_len,0}, {reductions,274}, {trap_exit,true}]}, {<0.53.0>, [{registered_name,os_mon_sysinfo}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04fffcfc Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) os_mon_sysinfo\ny(3) {state,#Port<0.1438>}\ny(4) os_mon_sysinfo\ny(5) <0.52.0>\n\n0x04fffd18 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,18}]}, {heap_size,2584}, {total_heap_size,3194}, {links,[<0.52.0>,#Port<0.1438>]}, {memory,13232}, {message_queue_len,0}, {reductions,6883}, {trap_exit,true}]}, {<0.54.0>, [{registered_name,disksup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05b35edc Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) disksup\ny(3) {state,80,60000,{win32,nt},[{\"C:\\\",48162864,60},{\"D:\\\",51279476,0},{\"G:\\\",34724465,17}],not_used}\ny(4) disksup\ny(5) <0.52.0>\n\n0x05b35ef8 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,11}]}, {heap_size,2584}, {total_heap_size,5168}, {links,[<0.52.0>]}, {memory,21108}, {message_queue_len,0}, {reductions,58502}, {trap_exit,true}]}, {<0.55.0>, [{registered_name,memsup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 
0\n\n0x0618dd28 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) memsup\ny(3) {state,{win32,nt},false,{4184846336,4284698624},{<0.299.0>,6656756},false,60000,30000,8.000000e-001,5.000000e-002,<0.11983.0>,undefined,undefined,[],[]}\ny(4) memsup\ny(5) <0.52.0>\n\n0x0618dd44 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,1974}, {links,[<0.52.0>]}, {memory,8332}, {message_queue_len,0}, {reductions,134769}, {trap_exit,true}]}, {<0.57.0>, [{registered_name,timer_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d21258 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) 400\ny(2) timer\ny(3) []\ny(4) timer_server\ny(5) <0.34.0>\n\n0x00d21274 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,6531}]}, {heap_size,1597}, {total_heap_size,1974}, {links, [<0.121.0>,<0.152.0>,<0.322.0>,<0.609.0>,<0.262.0>, <0.125.0>,<0.90.0>,<0.93.0>,<0.110.0>,<0.72.0>, <0.79.0>,<0.34.0>]}, {memory,8552}, {message_queue_len,0}, {reductions,1966544}, {trap_exit,true}]}, {<0.59.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x032afdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8367c Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.60.0>,{appl_data,ns_server,[ns_server_sup,ns_config,ns_config_sup,ns_config_events,ns_node_disco,ns_node_disco_events],undefined,{ns_server,[]},[misc,ns_config,ns_config_default,ns_config_log,ns_config_sup,ns_config_rep,ns_log,ns_node_disco,ns_node_disco_conf_events,ns_node_disco_log,ns_port_init,ns_port_server,ns_port_sup,ns_server,ns_server_sup],[menelaus],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x00f8368c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<0.7.0>,<0.60.0>]}, {memory,1964}, {message_queue_len,0}, {reductions,44}, {trap_exit,true}]}, {<0.60.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032b10fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f88a30 Return addr 0x00bdc194 ()\ny(0) []\ny(1) ns_server\ny(2) <0.61.0>\ny(3) <0.59.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.59.0>,<0.61.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,50}, {trap_exit,true}]}, {<0.61.0>, [{registered_name,ns_server_cluster_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03082590 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,ns_server_cluster_sup},one_for_one,[{child,<0.71.0>,ns_server_sup,{ns_server_sup,start_link,[]},permanent,infinity,supervisor,[ns_server_sup]},{child,<0.64.0>,ns_config_sup,{ns_config_sup,start_link,[]},permanent,infinity,supervisor,[ns_config_sup]},{child,<0.63.0>,ns_cluster,{ns_cluster,start_link,[]},permanent,5000,worker,[ns_cluster]},{child,<0.62.0>,dist_manager,{dist_manager,start_link,[]},permanent,10,worker,[dist_manager]},{child,undefined,log_os_info,{log_os_info,start_link,[]},transient,10,worker,[log_os_info]},{child,undefined,ns_log_mf_h,{ns_log_mf_h,start_link,[]},transient,10,worker,[ns_log_mf_h]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,1,[],ns_server_cluster_sup,[]}\ny(4) ns_server_cluster_sup\ny(5) <0.60.0>\n\n0x030825ac Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,377}, {total_heap_size,987}, {links,[<0.62.0>,<0.64.0>,<0.71.0>,<0.63.0>,<0.60.0>]}, {memory,4464}, {message_queue_len,0}, {reductions,2722}, {trap_exit,true}]}, {<0.62.0>, [{registered_name,dist_manager}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0308a638 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) dist_manager\ny(3) {state,false,\"127.0.0.1\"}\ny(4) dist_manager\ny(5) <0.61.0>\n\n0x0308a654 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.61.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,132}, {trap_exit,false}]}, {<0.63.0>, [{registered_name,ns_cluster}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x050966f8 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_cluster\ny(3) {state}\ny(4) ns_cluster\ny(5) <0.61.0>\n\n0x05096714 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,6}]}, {heap_size,6765}, {total_heap_size,10946}, {links,[<0.61.0>]}, {memory,44220}, {message_queue_len,0}, {reductions,43850}, {trap_exit,false}]}, {<0.64.0>, [{registered_name,ns_config_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030aa020 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,ns_config_sup},rest_for_one,[{child,undefined,ns_config_log,{ns_config_log,start_link,[]},transient,10,worker,[]},{child,undefined,ns_config_isasl_sync,{ns_config_isasl_sync,start_link,[]},transient,10,worker,[]},{child,<0.66.0>,ns_config,{ns_config,start_link,[\"priv/config\",ns_config_default]},permanent,10,worker,[ns_config,ns_config_default]},{child,<0.65.0>,ns_config_events,{gen_event,start_link,[{local,ns_config_events}]},permanent,10,worker,[]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},3,10,[],ns_config_sup,[]}\ny(4) ns_config_sup\ny(5) <0.61.0>\n\n0x030aa03c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,6765}, {total_heap_size,7142}, {links,[<0.65.0>,<0.66.0>,<0.61.0>]}, {memory,29044}, {message_queue_len,0}, {reductions,1037}, {trap_exit,true}]}, {<0.65.0>, [{registered_name,ns_config_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00ef4db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04b168a8 Return addr 0x00f0d8a0 (proc_lib:wake_up/3 + 60)\ny(0) false\ny(1) []\ny(2) [{handler,ns_pubsub,#Ref<0.0.0.462>,{state,#Fun,ok},<0.151.0>},{handler,ns_pubsub,#Ref<0.0.0.459>,{state,#Fun,undefined},<0.150.0>},{handler,ns_pubsub,#Ref<0.0.0.268>,{state,#Fun,undefined},<0.109.0>},{handler,ns_port_init,false,{state},false},{handler,menelaus_event,ns_config_events,{state,ns_config_events,[{ip,\"0.0.0.0\"},{port,8091},{approot,\"c:/Program Files/Membase/Server/bin/ns_server/deps/menelaus/priv/public\"},{docroot,\"c:/Program Files/Membase/Server/docs\"}],[{<0.10782.0>,#Ref<0.0.0.246301>},{<0.10944.0>,#Ref<0.0.0.245823>},{<0.228.0>,#Ref<0.0.0.1592>}]},false},{handler,ns_node_disco_conf_events,false,{state},false},{handler,ns_config_log,false,{state,[{rebalance_status,{none,<<76 bytes>>}}]},false},{handler,ns_config_isasl_sync,false,{state,[{\"default\",[]}],\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\",1,\"_admin\",\"_admin\"},false}]\ny(3) ns_config_events\ny(4) <0.64.0>\n\n0x04b168c0 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d8b0 (proc_lib:wake_up/3 + 76)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,66}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.109.0>,<0.150.0>,<0.151.0>,<0.64.0>]}, {memory,7004}, {message_queue_len,0}, {reductions,935574}, {trap_exit,true}]}, {<0.66.0>, [{registered_name,ns_config}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0504f19c Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_config\ny(3) {config,{full,\"priv/config\",undefined,ns_config_default},[[],[{directory,\"c:/Program Files/Membase/Server/config\"},{nodes_wanted,['ns_1@10.2.1.100']},{{node,'ns_1@10.2.1.100',membership},active},{rest,[{port,8091}]},{rest_creds,[{creds,[]}]},{{node,'ns_1@10.2.1.100',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.100',memcached},[{port,11210},{dbdir,\"c:/Program 
Files/Membase/Server/data/ns_1\"},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{memory_quota,3268},{buckets,[{configs,[]}]},{moxi,[{port,11211},{verbosity,[]}]},{port_servers,[{moxi,\"./bin/moxi/moxi\",[\"-Z\",{\"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",[port]},\"-z\",{\"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming\",[{rest,port}]},\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",{\"~s\",[{ns_moxi_sup,rest_user,[]}]}},{\"MOXI_SASL_PLAIN_PWD\",{\"~s\",[{ns_moxi_sup,rest_pass,[]}]}}]},use_stdio,stderr_to_stdout,stream]},{memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\",{\"~B\",[port]},\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",{\"admin=~s;default_bucket_name=default;auto_create=false\",[admin_user]},{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",{\"~s\",[{isasl,path}]}},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]}]},{{node,'ns_1@10.2.1.100',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{alerts,[{email,[]},{email_alerts,false},{email_server,[{user,undefined},{pass,undefined},{addr,undefined},{port,undefined},{encrypt,false}]},{alerts,[server_down,server_unresponsive,server_up,server_joined,server_left,bucket_created,bucket_deleted,bucket_auth_failed]}]},{replication,[{enabled,true}]}]],[[{{node,'ns_1@10.2.1.101',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.102',memcached},[{port,11210},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{otp,[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{cookie,pmqchiglstnppkwf}]},{memory_quota,3268},{{node,'ns_1@10.2.1.102',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.100',membership},active},{rebalance_status,{none,<<76 
bytes>>}},{{node,'ns_1@10.2.1.101',membership},active},{rest_creds,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{creds,[{\"Administrator\",[{password,'filtered-out'}]}]}]},{buckets,[{'_vclock',[{'ns_1@10.2.1.100',{9,63461309965}}]},{configs,[{\"default\",[{num_replicas,1},{ram_quota,3426746368},{auth_type,sasl},{sasl_password,[]},{type,membase},{num_vbuckets,1024},{ht_size,3079},{tap_keepalive,0},{tap_noop_interval,20},{max_txn_size,1000},{ht_locks,5},{servers,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{map,[['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefin
ed],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101',
'ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['
ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_
1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1
@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10
.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],
['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101
'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100'
,'ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],[
'ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101']]}]}]}]},{port_servers,[{moxi,\"./bin/moxi/moxi\",[\"-Z\",{\"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",[port]},\"-z\",{\"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming\",[{rest,port}]},\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",{\"~s\",[{ns_moxi_sup,rest_user,[]}]}},{\"MOXI_SASL_PLAIN_PWD\",{\"~s\",[{ns_moxi_sup,rest_pass,[]}]}}]},use_stdio,stderr_to_stdout,stream]},{memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\"
,{\"~B\",[port]},\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",{\"admin=~s;default_bucket_name=default;auto_create=false\",[admin_user]},{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",{\"~s\",[{isasl,path}]}},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]}]},{alerts,[{email,[]},{email_alerts,false},{email_server,[{user,undefined},{pass,undefined},{addr,undefined},{port,undefined},{encrypt,false}]},{alerts,[server_down,server_unresponsive,server_up,server_joined,server_left,bucket_created,bucket_deleted,bucket_auth_failed]}]},{nodes_wanted,[{'_vclock',[{'ns_1@10.2.1.100',{2,63461308289}}]},'ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{rest,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{port,8091}]},{{node,'ns_1@10.2.1.102',membership},active},{{node,'ns_1@10.2.1.100',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.101',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.102',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{moxi,[{port,11211},{verbosity,[]}]},{replication,[{enabled,true}]},{{node,'ns_1@10.2.1.100',memcached},[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307259}}]},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.101',memcached},[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.100',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]}]],ns_config_default}\ny(4) ns_config\ny(5) <0.64.0>\n\n0x0504f1b8 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,8}]}, {heap_size,46368}, {total_heap_size,92736}, {links,[<0.64.0>]}, {memory,371380}, {message_queue_len,0}, {reductions,1056270}, {trap_exit,false}]}, {<0.71.0>, [{registered_name,ns_server_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x045efd3c Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,ns_server_sup},one_for_one,[{child,<0.152.0>,ns_tick,{ns_tick,start_link,[]},permanent,10,worker,[ns_tick]},{child,<0.151.0>,ns_moxi_sup,{ns_moxi_sup,start_link,[]},permanent,infinity,supervisor,[ns_moxi_sup]},{child,<0.150.0>,ns_bad_bucket_sup,{ns_bucket_sup,start_link,[ns_bad_bucket_sup,#Fun,ns_bad_bucket_worker]},permanent,infinity,supervisor,[ns_bucket_sup]},{child,<0.149.0>,ns_bad_bucket_worker,{work_queue,start_link,[ns_bad_bucket_worker]},permanent,10,worker,[work_queue]},{child,<0.111.0>,ns_mnesia,{ns_mnesia,start_link,[]},permanent,10000,worker,[ns_mnesia]},{child,<0.110.0>,ns_orchestrator,{ns_orchestrator,start_link,[]},permanent,20,worker,[ns_orchestrator]},{child,<0.109.0>,ns_good_bucket_sup,{ns_bucket_sup,start_link,[ns_good_bucket_sup,#Fun,ns_good_bucket_worker]},permanent,infinity,supervisor,[ns_bucket_sup]},{child,<0.108.0>,ns_good_bucket_worker,{work_queue,start_link,[ns_good_bucket_worker]},permanent,10,worker,[work_queue]},{child,<0.107.0>,ns_stats_event,{gen_event,start_link,[{local,ns_stats_event}]},permanent,10,worker,dynamic},{child,<0.106.0>,ns_tick_event,{gen_event,start_link,[{local,ns_tick_event}]},permanent,10,worker,dynamic},{child,<0.100.0>,ns_port_sup,{ns_port_sup,start_link,[]},permanent,10,worker,[ns_port_sup]},{child,<0.95.0>,menelaus,{menelaus_app,start_subapp,[]},permanent,infinity,supervisor,[menelaus_app]},{child,<0.93.0>,ns_doctor,{ns_doctor,start_link,[]},permanent,10,worker,[ns_doctor]},{child,<0.90.0>,ns_heart,{ns_heart,start_link,[]},permanent,10,worker,[ns_heart]},{child,<0.77.0>,ns_node_disco_sup,{ns_node_disco_sup,start_link,[]},permanent,infinity,supervisor,[ns_node_disco_sup]},{child,<0.74.0>,ns_mail_sup,{ns_mail_sup,start_link,[]},permanent,infinity,supervisor,[ns_mail_sup]},{child,<0.73.0>,ns_log_events,{gen_event,start_link,[{local,ns_log_events}]},permanent,10,worker,dynamic},{child,<0.72.0>,ns_log,{ns_log,start_link,[]},permanent,10,worker,[ns_log]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_server_sup,[]}\ny(4) ns_server_sup\ny(5) <0.61.0>\n\n0x045efd58 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,46368}, {total_heap_size,121393}, {links, [<0.95.0>,<0.108.0>,<0.149.0>,<0.151.0>,<0.152.0>, <0.150.0>,<0.110.0>,<0.111.0>,<0.109.0>,<0.106.0>, <0.107.0>,<0.100.0>,<0.74.0>,<0.90.0>,<0.93.0>, <0.77.0>,<0.72.0>,<0.73.0>,<0.61.0>]}, {memory,486368}, {message_queue_len,0}, {reductions,55965}, {trap_exit,true}]}, {<0.72.0>, [{registered_name,ns_log}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05913c20 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_log\ny(3) {state,[{log_entry,{1294,87988,384001},'ns_1@10.2.1.100',ns_node_disco,3,\"Initial otp cookie generated: ~p\",[pmqchiglstnppkwf],info},{log_entry,{1294,87988,430002},'ns_1@10.2.1.100',menelaus_app,1,\"Membase Server has started on web port ~p on node ~p.\",[8091,'ns_1@10.2.1.100'],info},{log_entry,{1294,88063,489400},'ns_1@10.2.1.100',menelaus_web,12,\"Created bucket \\\"~s\\\" of type: ~s~n\",[\"default\",membase],info},{log_entry,{1294,88108,136403},'ns_1@10.2.1.100',ns_node_disco,4,\"Node ~p saw that node ~p came 
up.\",['ns_1@10.2.1.100','ns_1@10.2.1.101'],info},{log_entry,{1294,88108,383002},'ns_1@10.2.1.101',menelaus_app,1,\"Membase Server has started on web port ~p on node ~p.\",[8091,'ns_1@10.2.1.101'],info},{log_entry,{1294,88108,695024},'ns_1@10.2.1.101',ns_cluster,3,\"Node ~s joined cluster\",['ns_1@10.2.1.101'],info},{log_entry,{1294,88115,374400},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n\",[['ns_1@10.2.1.100','ns_1@10.2.1.101'],[]],info},{log_entry,{1294,88121,458400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[wait_for_memcached_failed],info},{log_entry,{1294,88151,83402},'ns_1@10.2.1.100',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.100',1],info},{log_entry,{1294,88198,567003},'ns_1@10.2.1.101',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.101',1],info},{log_entry,{1294,88206,713403},'ns_1@10.2.1.100',ns_orchestrator,1,\"Rebalance completed successfully.~n\",[],info},{log_entry,{1294,88468,373401},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n (repeated ~p times)\",[['ns_1@10.2.1.100','ns_1@10.2.1.101'],[],1],info},{log_entry,{1294,89089,429002},'ns_1@10.2.1.102',menelaus_app,1,\"Membase Server has started on web port ~p on node ~p.\",[8091,'ns_1@10.2.1.102'],info},{log_entry,{1294,89089,585005},'ns_1@10.2.1.102',ns_node_disco,4,\"Node ~p saw that node ~p came up.\",['ns_1@10.2.1.102','ns_1@10.2.1.101'],info},{log_entry,{1294,89089,773024},'ns_1@10.2.1.102',ns_cluster,3,\"Node ~s joined cluster\",['ns_1@10.2.1.102'],info},{log_entry,{1294,89089,788403},'ns_1@10.2.1.100',ns_node_disco,4,\"Node ~p saw that node ~p came up.\",['ns_1@10.2.1.100','ns_1@10.2.1.102'],info},{log_entry,{1294,89090,304213},'ns_1@10.2.1.101',ns_node_disco,4,\"Node ~p saw that node ~p came up.\",['ns_1@10.2.1.101','ns_1@10.2.1.102'],info},{log_entry,{1294,89092,596400},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[]],info},{log_entry,{1294,89098,696400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[wait_for_memcached_failed],info},{log_entry,{1294,89176,806003},'ns_1@10.2.1.102',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.102',1],info},{log_entry,{1294,89428,372401},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n (repeated ~p times)\",[wait_for_memcached_failed,2],info},{log_entry,{1294,89428,372401},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n (repeated ~p times)\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[],3],info},{log_entry,{1294,89842,961400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[stopped],info},{log_entry,{1294,89861,634400},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[]],info},{log_entry,{1294,90758,702400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[stopped],info},{log_entry,{1294,90765,67400},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[]],info},{log_entry,{1294,90778,796210},'ns_1@10.2.1.101',ns_memcached,4,\"Control connection to memcached on ~p disconnected: 
~p\",['ns_1@10.2.1.101',{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]}],info},{log_entry,{1294,90778,858400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[{{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},{gen_server,call,[{'ns_memcached-default','ns_1@10.2.1.101'},{delete_vbucket,633},30000]}}],info},{log_entry,{1294,90781,885212},'ns_1@10.2.1.101',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.101',0],info},{log_entry,{1294,90813,53400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[{{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},{gen_server,call,[{'ns_memcached-default','ns_1@10.2.1.101'},{delete_vbucket,65},30000]}}],info},{log_entry,{1294,90835,18400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[{{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},{gen_server,call,[{'ns_memcached-default','ns_1@10.2.1.101'},{delete_vbucket,70},30000]}}],info},{log_entry,{1294,91107,927211},'ns_1@10.2.1.101',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds. (repeated ~p times)\",[\"default\",'ns_1@10.2.1.101',0,9],info},{log_entry,{1294,91107,927211},'ns_1@10.2.1.101',ns_memcached,4,\"Control connection to memcached on ~p disconnected: ~p (repeated ~p times)\",['ns_1@10.2.1.101',{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},9],info},{log_entry,{1294,91108,378401},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n (repeated ~p times)\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[],2],info},{log_entry,{1294,91120,672210},'ns_1@10.2.1.101',ns_memcached,4,\"Control connection to memcached on ~p disconnected: ~p\",['ns_1@10.2.1.101',{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]}],info},{log_entry,{1294,91120,813213},'ns_1@10.2.1.101',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.101',0],info}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},undefined,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}\ny(4) ns_log\ny(5) <0.71.0>\n\n0x05913c3c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,11}]}, {heap_size,6765}, {total_heap_size,13530}, {links,[<0.57.0>,<0.71.0>]}, {memory,54576}, {message_queue_len,0}, {reductions,18180}, {trap_exit,false}]}, {<0.73.0>, [{registered_name,ns_log_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00bdc190 (unknown function)\nCP: 
0x00bdc194 ()\narity = 3\n proc_lib\n wake_up\n [gen_event,wake_hib,[<0.71.0>,ns_log_events,[{handler,ns_mail_log,false,{state},false}],[]]]\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,36}]}, {heap_size,34}, {total_heap_size,34}, {links,[<0.71.0>]}, {memory,572}, {message_queue_len,0}, {reductions,20327}, {trap_exit,true}]}, {<0.74.0>, [{registered_name,ns_mail_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030aa610 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_mail_sup},one_for_all,[{child,undefined,ns_mail_log,{ns_mail_log,start_link,[]},transient,10,worker,[ns_mail_log]},{child,<0.75.0>,ns_mail,{ns_mail,start_link,[]},permanent,10,worker,[ns_mail]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_mail_sup,[]}\ny(4) ns_mail_sup\ny(5) <0.71.0>\n\n0x030aa62c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.75.0>,<0.71.0>]}, {memory,3472}, {message_queue_len,0}, {reductions,680}, {trap_exit,true}]}, {<0.75.0>, [{registered_name,ns_mail}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f99408 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_mail\ny(3) empty_state\ny(4) ns_mail\ny(5) <0.74.0>\n\n0x00f99424 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.74.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,27}, {trap_exit,true}]}, {<0.77.0>, [{registered_name,ns_node_disco_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03082b80 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_node_disco_sup},rest_for_one,[{child,<0.85.0>,ns_config_rep,{ns_config_rep,start_link,[]},permanent,10,worker,[ns_config_rep]},{child,undefined,ns_node_disco_conf_events,{ns_node_disco_conf_events,start_link,[]},transient,10,worker,[]},{child,undefined,ns_node_disco_log,{ns_node_disco_log,start_link,[]},transient,10,worker,[]},{child,<0.79.0>,ns_node_disco,{ns_node_disco,start_link,[]},permanent,10,worker,[]},{child,<0.78.0>,ns_node_disco_events,{gen_event,start_link,[{local,ns_node_disco_events}]},permanent,10,worker,[]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_node_disco_sup,[]}\ny(4) ns_node_disco_sup\ny(5) <0.71.0>\n\n0x03082b9c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.78.0>,<0.79.0>,<0.85.0>,<0.71.0>]}, {memory,3512}, {message_queue_len,0}, {reductions,841}, {trap_exit,true}]}, 
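%% The ns_log state in the <0.72.0> backtrace above traces the rebalance
%% history: several attempts exited with wait_for_memcached_failed, and the
%% later attempts failed with {badmatch,{error,timeout}} raised from
%% mc_client_binary:delete_vbucket/2 during 30000 ms gen_server calls to
%% {'ns_memcached-default','ns_1@10.2.1.101'}, alongside repeated "Control
%% connection to memcached ... disconnected" entries for that node. The
%% bucket map earlier in the dump likewise still lists undefined replicas
%% for a block of vbuckets, which appears consistent with a rebalance that
%% never completed.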
{<0.78.0>, [{registered_name,ns_node_disco_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00ef4db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a83b9c Return addr 0x00f0d8a0 (proc_lib:wake_up/3 + 60)\ny(0) false\ny(1) []\ny(2) [{handler,menelaus_event,ns_node_disco_events,{state,ns_node_disco_events,undefined,[{<0.10782.0>,#Ref<0.0.0.246303>},{<0.10944.0>,#Ref<0.0.0.245825>},{<0.228.0>,#Ref<0.0.0.1594>}]},false},{handler,ns_node_disco_rep_events,false,{state},false},{handler,ns_node_disco_log,false,{state},false}]\ny(3) ns_node_disco_events\ny(4) <0.77.0>\n\n0x04a83bb4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d8b0 (proc_lib:wake_up/3 + 76)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,194}]}, {heap_size,233}, {total_heap_size,377}, {links,[<0.77.0>]}, {memory,2064}, {message_queue_len,0}, {reductions,50051}, {trap_exit,true}]}, {<0.79.0>, [{registered_name,ns_node_disco}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x055f26b4 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_node_disco\ny(3) {state,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],{interval,#Ref<0.0.0.134>}}\ny(4) ns_node_disco\ny(5) <0.77.0>\n\n0x055f26d0 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3161}]}, {heap_size,46368}, {total_heap_size,57314}, {links,[<0.77.0>,<0.57.0>]}, {memory,229712}, {message_queue_len,0}, {reductions,4048977}, {trap_exit,false}]}, {<0.85.0>, [{registered_name,ns_config_rep}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05c8d494 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_config_rep\ny(3) {state}\ny(4) ns_config_rep\ny(5) <0.77.0>\n\n0x05c8d4b0 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,40}]}, {heap_size,46368}, {total_heap_size,57314}, {links,[<0.77.0>]}, {memory,229692}, {message_queue_len,0}, {reductions,61054}, {trap_exit,false}]}, {<0.90.0>, [{registered_name,ns_heart}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f4d3b0 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_heart\ny(3) [{replication,[{\"default\",5.000000e-001}]},{system_memory_data,[{total_memory,4284698624},{free_memory,92594176},{system_total_memory,4284698624}]},{statistics,[{wall_clock,{3290395,0}},{context_switches,{966801,0}},{garbage_collection,{172672,1926558401,0}},{io,{{input,101363188},{output,50652200}}},{reductions,{792350692,811632}},{run_queue,0},{runtime,{46067,32}}]}]\ny(4) ns_heart\ny(5) <0.71.0>\n\n0x00f4d3cc Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2644}]}, {heap_size,6765}, {total_heap_size,53133}, {links,[<0.71.0>,<0.57.0>]}, {memory,212988}, {message_queue_len,0}, {reductions,6050002}, 
{trap_exit,false}]}, {<0.93.0>, [{registered_name,ns_doctor}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f53d70 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_doctor\ny(3) {state,{dict,3,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[['ns_1@10.2.1.100',{last_heard,{1294,91287,764400}},{active_buckets,[\"default\"]},{memory,[{total,22125376},{processes,13685396},{processes_used,13666276},{system,8439980},{atom,560789},{atom_used,558343},{binary,181288},{code,4582580},{ets,1693668}]},{cluster_compatibility_version,1},{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{inets,\"5.2\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{system_arch,\"windows\"},{wall_clock,3299},{memory_data,{4284698624,4184846336,{<0.299.0>,6656756}}},{disk_data,[{\"C:\\\",48162864,60},{\"D:\\\",51279476,0},{\"G:\\\",34724465,17}]},{replication,[{\"default\",5.000000e-001}]},{system_memory_data,[{total_memory,4284698624},{free_memory,92594176},{system_total_memory,4284698624}]},{statistics,[{wall_clock,{3290395,0}},{context_switches,{966801,0}},{garbage_collection,{172672,1926558401,0}},{io,{{input,101363188},{output,50652200}}},{reductions,{792350692,811632}},{run_queue,0},{runtime,{46067,32}}]}]],[['ns_1@10.2.1.101',{last_heard,{1294,91287,748402}},{active_buckets,[\"default\"]},{memory,[{total,19416288},{processes,11039004},{processes_used,11028940},{system,8377284},{atom,559813},{atom_used,556363},{binary,246232},{code,4551541},{ets,1606372}]},{cluster_compatibility_version,1},{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{inets,\"5.2\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{system_arch,\"windows\"},{wall_clock,3229},{memory_data,{4284698624,4210270208,{<11993.387.0>,5385512}}},{disk_data,[{\"C:\\\",46243100,46},{\"D:\\\",51809624,0},{\"G:\\\",33929248,18}]},{replication,[{\"default\",0.000000e+000}]},{system_memory_data,[{total_memory,4284698624},{free_memory,87326720},{system_total_memory,4284698624}]},{statistics,[{wall_clock,{3222470,1575}},{context_switches,{571544,0}},{garbage_collection,{128134,1245695829,0}},{io,{{input,79617831},{output,38541094}}},{reductions,{396374397,3168310}},{run_queue,0},{runtime,{30981,265}}]}]],[['ns_1@10.2.1.102',{last_heard,{1294,91288,170400}},{active_buckets,[\"default\"]},{memory,[{total,16913040},{processes,8815924},{processes_used,8802956},{system,8097116},{atom,541565},{atom_used,529955},{binary,576704},{code,4290459},{ets,1275748}]},{cluster_compatibility_version,1},{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{system_arch,\"windows\"},{wall_clock,2218},{memory_data,{4284698624,3351322624,{<10870.307.0>,4114268}}},{disk_data,[{\"C:\\\",49423972,41},{\"D:\\\",52797620,0},{\"G:\\\",34724465,17}]},{replication,[{\"default\",0.000000e+000}]},{system_memory_data,[{total_memory,4284698624},{free_memory,933093376},{system_total_memory,4284698624}]},{statistics,[{wall_clock,{2210316,0}},{context_switches,{223166,0}},{garbage_collection,{63145,320518908,0}},{io,{{input,23485359},{output,21305805}}},{reductions,{129761022,609670}},{run_queue,0},{runtime,{12058,46}}]}]],[],[],[],[],[],[],[],[],[],[],[],[],[]}}}}\ny(4) ns_doctor\ny(5) <0.71.0>\n\n0x00f53d8c Return 
addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1180}]}, {heap_size,6765}, {total_heap_size,10946}, {links,[<0.71.0>,<0.57.0>]}, {memory,44240}, {message_queue_len,0}, {reductions,817540}, {trap_exit,false}]}, {<0.95.0>, [{registered_name,menelaus_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a8c0b4 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,menelaus_sup},one_for_one,[{child,<0.609.0>,hot_keys_keeper,{hot_keys_keeper,start_link,[]},permanent,5000,worker,dynamic},{child,undefined,menelaus_event,{menelaus_event,start_link,[]},transient,5000,worker,dynamic},{child,<0.96.0>,menelaus_web,{menelaus_web,start_link,[]},permanent,5000,worker,dynamic}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,10,[{1294,88148,556402},{1294,88143,548401},{1294,88138,541402}],menelaus_sup,[]}\ny(4) menelaus_sup\ny(5) <0.71.0>\n\n0x04a8c0d0 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,6}]}, {heap_size,2584}, {total_heap_size,13530}, {links,[<0.96.0>,<0.609.0>,<0.71.0>]}, {memory,54596}, {message_queue_len,0}, {reductions,3885}, {trap_exit,true}]}, {<0.96.0>, [{registered_name,menelaus_web}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x050cffec Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mochiweb_socket_server\ny(3) {mochiweb_socket_server,8091,#Fun,{local,menelaus_web},2043,{0,0,0,0},#Port<0.1928>,<0.12004.0>,128}\ny(4) menelaus_web\ny(5) <0.95.0>\n\n0x050d0008 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,39}]}, {heap_size,610}, {total_heap_size,1220}, {links, [<0.10944.0>,<0.11987.0>,<0.12004.0>,<0.11252.0>, <0.228.0>,<0.10782.0>,<0.95.0>,#Port<0.1928>]}, {memory,5456}, {message_queue_len,0}, {reductions,10871}, {trap_exit,true}]}, {<0.100.0>, [{registered_name,ns_port_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03139848 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,ns_port_sup},one_for_one,[{child,<0.300.0>,{moxi,\"./bin/moxi/moxi\",[\"-Z\",\"port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",\"-z\",\"url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming\",\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",[]],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",\"Administrator\"},{\"MOXI_SASL_PLAIN_PWD\",\"j4958ph\"}]},use_stdio,stderr_to_stdout,stream]},{supervisor_cushion,start_link,[moxi,5000,ns_port_server,start_link,[moxi,\"./bin/moxi/moxi\",[\"-Z\",\"port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",\"-z\",\"url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming\",\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",[]],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",\"Administrator\"},{\"MOXI_SASL_PLAIN_PWD\",\"j4958ph\"}]},use_stdio,stderr_to_stdout,stream]]]},permanent,10,worker,[ns_port_server]},{child,<0.104.0>,{memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\",\"11210\",\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",\"admin=_admin;default_bucket_name=default;auto_create=false\",[]],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]},{supervisor_cushion,start_link,[memcached,5000,ns_port_server,start_link,[memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\",\"11210\",\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",\"admin=_admin;default_bucket_name=default;auto_create=false\",[]],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]]]},permanent,10,worker,[ns_port_server]},{child,undefined,ns_port_init,{ns_port_init,start_link,[]},transient,10,worker,[]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_port_sup,[]}\ny(4) ns_port_sup\ny(5) <0.71.0>\n\n0x03139864 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,28657}, {total_heap_size,39603}, {links,[<0.104.0>,<0.300.0>,<0.71.0>]}, {memory,158888}, {message_queue_len,0}, {reductions,6655}, {trap_exit,true}]}, {<0.104.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03089d30 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor_cushion\ny(3) {state,memcached,5000,{1294,87988,446000},<0.105.0>}\ny(4) <0.104.0>\ny(5) <0.100.0>\n\n0x03089d4c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, 
{error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,987}, {total_heap_size,987}, {links,[<0.100.0>,<0.105.0>]}, {memory,4404}, {message_queue_len,0}, {reductions,55}, {trap_exit,true}]}, {<0.105.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d18f98 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_port_server\ny(3) {state,#Port<0.1998>,memcached,{[\"Suspend eq_tapq:anon_689 for 1.00 secs\",\"Suspend eq_tapq:anon_689 for 1.00 secs\"],[\"Suspend eq_tapq:anon_689 for 1.00 secs\"]},{ok,{1294091288967401,#Ref<0.0.0.246355>}},[\"Suspend eq_tapq:anon_689 for 1.00 secs\"],0}\ny(4) <0.105.0>\ny(5) <0.104.0>\n\n0x00d18fb4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,69}]}, {heap_size,6765}, {total_heap_size,17711}, {links,[<0.104.0>,#Port<0.1998>]}, {memory,71300}, {message_queue_len,0}, {reductions,84187}, {trap_exit,true}]}, {<0.106.0>, [{registered_name,ns_tick_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00ef4db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a84530 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,ns_pubsub,#Ref<0.0.0.1278>,{state,#Fun,ignored},<0.259.0>}]\ny(3) ns_tick_event\ny(4) <0.71.0>\n\n0x04a84548 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,172}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.71.0>,<0.259.0>]}, {memory,4404}, {message_queue_len,0}, {reductions,36107}, {trap_exit,true}]}, {<0.107.0>, [{registered_name,ns_stats_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00ef4db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05b38748 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,ns_pubsub,#Ref<0.0.0.1494>,{state,#Fun,ignored},<0.262.0>}]\ny(3) ns_stats_event\ny(4) <0.71.0>\n\n0x05b38760 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1195}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.262.0>,<0.71.0>]}, {memory,12300}, {message_queue_len,0}, {reductions,140475}, {trap_exit,true}]}, {<0.108.0>, [{registered_name,ns_good_bucket_worker}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a854a0 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) work_queue\ny(3) []\ny(4) ns_good_bucket_worker\ny(5) <0.71.0>\n\n0x04a854bc Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<0.71.0>]}, {memory,6824}, {message_queue_len,0}, {reductions,381}, {trap_exit,false}]}, {<0.109.0>, [{registered_name,ns_good_bucket_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, 
{backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0314b460 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_good_bucket_sup},one_for_one,[{child,<0.322.0>,{ns_memcached,\"default\"},{ns_memcached,start_link,[\"default\"]},permanent,86400000,worker,[ns_memcached]},{child,<0.260.0>,{ns_vbm_sup,\"default\"},{ns_vbm_sup,start_link,[\"default\"]},permanent,1000,worker,[ns_vbm_sup]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},3,10,[{1294,88073,519400}],ns_bucket_sup,{ns_good_bucket_sup,#Fun,ns_good_bucket_worker}}\ny(4) ns_good_bucket_sup\ny(5) <0.71.0>\n\n0x0314b47c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,6765}, {total_heap_size,6765}, {links,[<0.71.0>,<0.260.0>,<0.322.0>,<0.65.0>]}, {memory,27556}, {message_queue_len,0}, {reductions,847}, {trap_exit,true}]}, {<0.110.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x044603bc (gen_fsm:loop/7 + 156)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f6b0c4 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_orchestrator\ny(3) {janitor_state,[\"default\"],<0.11992.0>}\ny(4) janitor_running\ny(5) ns_orchestrator\ny(6) <0.71.0>\n\n0x00f6b0e4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,338}]}, {heap_size,4181}, {total_heap_size,15127}, {links, [<0.71.0>,<11993.315.0>,<0.11992.0>,<10870.235.0>, <0.57.0>]}, {memory,61224}, {message_queue_len,0}, {reductions,1179367}, {trap_exit,true}]}, {<0.111.0>, [{registered_name,ns_mnesia}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030f56e0 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_mnesia\ny(3) {state}\ny(4) ns_mnesia\ny(5) <0.71.0>\n\n0x030f56fc Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,1597}, {total_heap_size,4181}, {links,[<0.119.0>,<0.71.0>]}, {memory,17180}, {message_queue_len,0}, {reductions,1639}, {trap_exit,true}]}, {<0.113.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x032afdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f9dda4 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) 
{state,<0.114.0>,{appl_data,mnesia,[mnesia_dumper_load_regulator,mnesia_event,mnesia_fallback,mnesia_controller,mnesia_kernel_sup,mnesia_late_loader,mnesia_locker,mnesia_monitor,mnesia_recover,mnesia_substr,mnesia_sup,mnesia_tm],undefined,{mnesia_sup,[]},[mnesia,mnesia_backup,mnesia_bup,mnesia_checkpoint,mnesia_checkpoint_sup,mnesia_controller,mnesia_dumper,mnesia_event,mnesia_frag,mnesia_frag_hash,mnesia_frag_old_hash,mnesia_index,mnesia_kernel_sup,mnesia_late_loader,mnesia_lib,mnesia_loader,mnesia_locker,mnesia_log,mnesia_monitor,mnesia_recover,mnesia_registry,mnesia_schema,mnesia_snmp_hook,mnesia_snmp_sup,mnesia_subscr,mnesia_sup,mnesia_sp,mnesia_text,mnesia_tm],[],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x00f9ddb4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<0.7.0>,<0.114.0>]}, {memory,6844}, {message_queue_len,0}, {reductions,82}, {trap_exit,true}]}, {<0.114.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032b10fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030a3460 Return addr 0x00bdc194 ()\ny(0) {normal,[]}\ny(1) mnesia_sup\ny(2) <0.115.0>\ny(3) <0.113.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.113.0>,<0.115.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,40}, {trap_exit,true}]}, {<0.115.0>, [{registered_name,mnesia_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030f62c0 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,mnesia_sup},one_for_all,[{child,<0.117.0>,mnesia_kernel_sup,{mnesia_kernel_sup,start,[]},permanent,infinity,supervisor,[mnesia_kernel_sup,supervisor]},{child,<0.116.0>,mnesia_event,{mnesia_sup,start_event,[]},permanent,30000,worker,[mnesia_event,gen_event]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,3600,[],mnesia_sup,[[]]}\ny(4) mnesia_sup\ny(5) <0.114.0>\n\n0x030f62dc Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.116.0>,<0.117.0>,<0.114.0>]}, {memory,3492}, {message_queue_len,0}, {reductions,207}, {trap_exit,true}]}, {<0.116.0>, [{registered_name,mnesia_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00ef4db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0314e660 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,mnesia_event,false,{state,[],false,[]},false}]\ny(3) mnesia_event\ny(4) <0.115.0>\n\n0x0314e678 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,6}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.115.0>,<0.119.0>]}, {memory,4404}, {message_queue_len,0}, {reductions,454}, {trap_exit,true}]}, {<0.117.0>, 
[{registered_name,mnesia_kernel_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f74e7c Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,mnesia_kernel_sup},one_for_all,[{child,<0.126.0>,mnesia_late_loader,{mnesia_late_loader,start,[]},permanent,3000,worker,[mnesia_late_loader,mnesia_monitor,proc_lib]},{child,<0.125.0>,mnesia_controller,{mnesia_controller,start,[]},permanent,3000,worker,[mnesia_controller,gen_server]},{child,<0.124.0>,mnesia_snmp_sup,{mnesia_snmp_sup,start,[]},permanent,infinity,supervisor,[mnesia_snmp_sup,supervisor]},{child,<0.123.0>,mnesia_checkpoint_sup,{mnesia_checkpoint_sup,start,[]},permanent,infinity,supervisor,[mnesia_checkpoint_sup,supervisor]},{child,<0.122.0>,mnesia_tm,{mnesia_tm,start,[]},permanent,30000,worker,[mnesia_tm,mnesia_monitor,proc_lib]},{child,<0.121.0>,mnesia_recover,{mnesia_recover,start,[]},permanent,180000,worker,[mnesia_recover,gen_server]},{child,<0.120.0>,mnesia_locker,{mnesia_locker,start,[]},permanent,3000,worker,[mnesia_locker,mnesia_monitor,proc_lib]},{child,<0.119.0>,mnesia_subscr,{mnesia_subscr,start,[]},permanent,3000,worker,[mnesia_subscr,gen_server]},{child,<0.118.0>,mnesia_monitor,{mnesia_monitor,start,[]},permanent,3000,worker,[mnesia_monitor,gen_server]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,86400000,[],mnesia_kernel_sup,[]}\ny(4) mnesia_kernel_sup\ny(5) <0.115.0>\n\n0x00f74e98 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,8}]}, {heap_size,610}, {total_heap_size,987}, {links, [<0.120.0>,<0.124.0>,<0.125.0>,<0.126.0>,<0.122.0>, <0.123.0>,<0.121.0>,<0.118.0>,<0.119.0>,<0.115.0>]}, {memory,4564}, {message_queue_len,0}, {reductions,599}, {trap_exit,true}]}, {<0.118.0>, [{registered_name,mnesia_monitor}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f5b6a8 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mnesia_monitor\ny(3) {state,<0.117.0>,[],[],true,[],undefined,[]}\ny(4) mnesia_monitor\ny(5) <0.117.0>\n\n0x00f5b6c4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,46}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<0.131.0>,<0.117.0>]}, {memory,6844}, {message_queue_len,0}, {reductions,9624}, {trap_exit,true}]}, {<0.119.0>, [{registered_name,mnesia_subscr}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030a30a0 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mnesia_subscr\ny(3) {state,<0.117.0>,20502}\ny(4) mnesia_subscr\ny(5) <0.117.0>\n\n0x030a30bc Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.116.0>,<0.117.0>,<0.111.0>]}, {memory,1408}, {message_queue_len,0}, {reductions,111}, {trap_exit,true}]}, 
{<0.120.0>, [{registered_name,mnesia_locker}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x044bb9d8 (mnesia_locker:loop/1 + 20)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a8f294 Return addr 0x0448f438 (mnesia_sp:init_proc/4 + 132)\ny(0) []\ny(1) []\ny(2) []\ny(3) []\ny(4) []\ny(5) {state,<0.117.0>}\n\n0x04a8f2b0 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) Catch 0x0448f438 (mnesia_sp:init_proc/4 + 132)\ny(1) mnesia_locker\ny(2) []\ny(3) []\ny(4) [<0.117.0>]\n\n0x04a8f2c8 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,956}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.117.0>]}, {memory,12280}, {message_queue_len,0}, {reductions,479677}, {trap_exit,true}]}, {<0.121.0>, [{registered_name,mnesia_recover}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05b02168 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mnesia_recover\ny(3) {state,<0.117.0>,undefined,undefined,undefined,0,true,[]}\ny(4) mnesia_recover\ny(5) <0.117.0>\n\n0x05b02184 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,12}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<0.117.0>,<0.57.0>]}, {memory,5912}, {message_queue_len,0}, {reductions,6325}, {trap_exit,true}]}, {<0.122.0>, [{registered_name,mnesia_tm}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x044c3e04 (mnesia_tm:doit_loop/1 + 108)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a80f60 Return addr 0x0448f438 (mnesia_sp:init_proc/4 + 132)\ny(0) []\ny(1) []\ny(2) {state,{0,nil},{0,nil},<0.117.0>,[],[],[]}\ny(3) []\ny(4) []\ny(5) <0.117.0>\ny(6) {0,nil}\ny(7) {0,nil}\n\n0x04a80f84 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) Catch 0x0448f438 (mnesia_sp:init_proc/4 + 132)\ny(1) mnesia_tm\ny(2) []\ny(3) []\ny(4) [<0.117.0>]\n\n0x04a80f9c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,598}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<0.117.0>]}, {memory,6824}, {message_queue_len,0}, {reductions,1091793}, {trap_exit,true}]}, {<0.123.0>, [{registered_name,mnesia_checkpoint_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f762e0 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,mnesia_checkpoint_sup},simple_one_for_one,[{child,undefined,mnesia_checkpoint_sup,{mnesia_checkpoint,start,[]},transient,3000,worker,[mnesia_checkpoint_sup,mnesia_checkpoint,supervisor]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,86400000,[],mnesia_checkpoint_sup,[]}\ny(4) mnesia_checkpoint_sup\ny(5) <0.117.0>\n\n0x00f762fc Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, 
{links,[<0.117.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,61}, {trap_exit,true}]}, {<0.124.0>, [{registered_name,mnesia_snmp_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f7c310 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,mnesia_snmp_sup},simple_one_for_one,[{child,undefined,mnesia_snmp_sup,{mnesia_snmp_hook,start,[]},transient,3000,worker,[mnesia_snmp_sup,mnesia_snmp_hook,supervisor]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,86400000,[],mnesia_snmp_sup,[]}\ny(4) mnesia_snmp_sup\ny(5) <0.117.0>\n\n0x00f7c32c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.117.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,61}, {trap_exit,true}]}, {<0.125.0>, [{registered_name,mnesia_controller}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04acca88 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mnesia_controller\ny(3) {state,<0.117.0>,true,[],[],{0,nil},[],[],{0,nil},undefined,[],[],{interval,#Ref<0.0.0.323>},false}\ny(4) mnesia_controller\ny(5) <0.117.0>\n\n0x04accaa4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,4}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<0.117.0>,<0.57.0>]}, {memory,5912}, {message_queue_len,0}, {reductions,1087}, {trap_exit,true}]}, {<0.126.0>, [{registered_name,mnesia_late_loader}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0451f47c (mnesia_late_loader:loop/1 + 20)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f73eec Return addr 0x0448f438 (mnesia_sp:init_proc/4 + 132)\ny(0) []\ny(1) []\ny(2) {state,<0.117.0>}\n\n0x00f73efc Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) Catch 0x0448f438 (mnesia_sp:init_proc/4 + 132)\ny(1) mnesia_late_loader\ny(2) []\ny(3) []\ny(4) [<0.117.0>]\n\n0x00f73f14 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<0.117.0>]}, {memory,1944}, {message_queue_len,0}, {reductions,178}, {trap_exit,false}]}, {<0.129.0>, [{registered_name,disk_log_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05b0b988 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,disk_log_sup},simple_one_for_one,[{child,undefined,disk_log,{disk_log,istart_link,[]},temporary,1000,worker,[disk_log]}],{dict,1,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[[<0.131.0>,<0.130.0>]],[],[],[],[],[],[],[],[],[],[],[],[]}}},4,3600,[],disk_log_sup,[]}\ny(4) disk_log_sup\ny(5) <0.34.0>\n\n0x05b0b9a4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 
(proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,25}]}, {heap_size,377}, {total_heap_size,987}, {links,[<0.131.0>,<0.34.0>]}, {memory,4404}, {message_queue_len,0}, {reductions,7283}, {trap_exit,true}]}, {<0.130.0>, [{registered_name,disk_log_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05b3c43c Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) disk_log_server\ny(3) {state,[]}\ny(4) disk_log_server\ny(5) <0.34.0>\n\n0x05b3c458 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,33}]}, {heap_size,2584}, {total_heap_size,3571}, {links,[<0.131.0>,<0.34.0>]}, {memory,14740}, {message_queue_len,0}, {reductions,8451}, {trap_exit,true}]}, {<0.131.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x049853e8 (disk_log:loop/1 + 84)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x059081d4 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) {state,[],[],<0.129.0>,<0.130.0>,151,{arg,latest_log,undefined,\"c:/Program Files/Membase/Server/Mnesia.ns_1@10.2.1.100/LATEST.LOG\",true,infinity,halt,false,internal,<0.118.0>,none,read_write,true,[{notify,true},{file,\"c:/Program Files/Membase/Server/Mnesia.ns_1@10.2.1.100/LATEST.LOG\"},{name,latest_log},{repair,true},{mode,read_write}]},ok,ok}\n\n0x059081dc Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,16}]}, {heap_size,1597}, {total_heap_size,4181}, {links,[<0.129.0>,<0.130.0>,<0.118.0>,#Port<0.6833>]}, {memory,17220}, {message_queue_len,0}, {reductions,346982}, {trap_exit,true}]}, {<0.134.0>, [{registered_name,dets_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0314f238 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,dets_sup},simple_one_for_one,[{child,undefined,dets,{dets,istart_link,[]},temporary,30000,worker,[dets]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},4,3600,[],dets_sup,[]}\ny(4) dets_sup\ny(5) <0.34.0>\n\n0x0314f254 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,5}]}, {heap_size,377}, {total_heap_size,987}, {links,[<0.34.0>]}, {memory,4384}, {message_queue_len,0}, {reductions,596}, {trap_exit,true}]}, {<0.135.0>, [{registered_name,dets}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a8ca44 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) dets_server\ny(3) {state,40995,[<0.34.0>],[]}\ny(4) dets\ny(5) <0.34.0>\n\n0x04a8ca60 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,8}]}, {heap_size,610}, 
{total_heap_size,1597}, {links,[<0.34.0>]}, {memory,6824}, {message_queue_len,0}, {reductions,965}, {trap_exit,true}]}, {<0.149.0>, [{registered_name,ns_bad_bucket_worker}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04b17bc8 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) work_queue\ny(3) []\ny(4) ns_bad_bucket_worker\ny(5) <0.71.0>\n\n0x04b17be4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<0.71.0>]}, {memory,5892}, {message_queue_len,0}, {reductions,421}, {trap_exit,false}]}, {<0.150.0>, [{registered_name,ns_bad_bucket_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04685a90 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_bad_bucket_sup},one_for_one,[{child,<0.299.0>,{stats_reader,\"default\"},{stats_reader,start_link,[\"default\"]},permanent,10,worker,[stats_reader]},{child,<0.262.0>,{stats_archiver,\"default\"},{stats_archiver,start_link,[\"default\"]},permanent,10,worker,[stats_archiver]},{child,<0.259.0>,{stats_collector,\"default\"},{stats_collector,start_link,[\"default\"]},permanent,10,worker,[stats_collector]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},3,10,[],ns_bucket_sup,{ns_bad_bucket_sup,#Fun,ns_bad_bucket_worker}}\ny(4) ns_bad_bucket_sup\ny(5) <0.71.0>\n\n0x04685aac Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,6765}, {total_heap_size,6765}, {links, [<0.71.0>,<0.262.0>,<0.299.0>,<0.259.0>,<0.65.0>]}, {memory,27576}, {message_queue_len,0}, {reductions,888}, {trap_exit,true}]}, {<0.151.0>, [{registered_name,ns_moxi_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x06a195c8 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_moxi_sup},one_for_one,[],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_moxi_sup,[]}\ny(4) ns_moxi_sup\ny(5) <0.71.0>\n\n0x06a195e4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,1597}, {total_heap_size,12543}, {links,[<0.71.0>,<0.65.0>]}, {memory,50628}, {message_queue_len,0}, {reductions,1910}, {trap_exit,true}]}, {<0.152.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05622874 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_tick\ny(3) {state,1294091287748}\ny(4) ns_tick\ny(5) <0.71.0>\n\n0x05622890 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, 
{error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,214}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.71.0>,<11993.390.0>,<10870.310.0>,<0.57.0>]}, {memory,12540}, {message_queue_len,0}, {reductions,141677}, {trap_exit,false}]}, {<0.228.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x04438f08 (menelaus_web:handle_streaming/4 + 196)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x06133d34 Return addr 0x044372f8 (menelaus_web:loop/3 + 12136)\ny(0) {struct,[{buckets,[{struct,[{name,<<7 bytes>>},{nodeLocator,vbucket},{saslPassword,<<0 bytes>>},{nodes,[{struct,[{replication,5.000000e-001},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]},{struct,[{replication,0.000000e+000},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]},{struct,[{replication,0.000000e+000},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]}]},{vBucketServerMap,{struct,[{hashAlgorithm,<<3 bytes>>},{numReplicas,1},{serverList,[<<16 bytes>>,<<16 bytes>>,<<16 bytes>>]},{vBucketMap,[[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,
0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],
[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1]]}]}}]}]}]}\ny(1) {struct,[{buckets,[{struct,[{name,<<7 bytes>>},{nodeLocator,vbucket},{saslPassword,<<0 bytes>>},{nodes,[{struct,[{replication,5.000000e-001},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]},{struct,[{replication,0.000000e+000},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]},{struct,[{replication,0.000000e+000},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]}]},{vBucketServerMap,{struct,[{hashAlgorithm,<<3 bytes>>},{numReplicas,1},{serverList,[<<16 bytes>>,<<16 bytes>>,<<16 bytes>>]},{vBucketMap,[[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0
],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[
0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1]]}]}}]}]}]}\ny(2) {mochiweb_response,{mochiweb_request,#Port<0.3418>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},{3,{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept\",{'Accept',\"*/*\"},nil,nil},{\"host\",{'Host',\"127.0.0.1:8091\"},nil,nil}}}},200,{6,{\"pragma\",{\"Pragma\",\"no-cache\"},{\"cache-control\",{\"Cache-Control\",\"no-cache no-store max-age=0\"},nil,{\"content-type\",{\"Content-Type\",\"application/json; charset=utf-8\"},nil,{\"date\",{\"Date\",\"Mon, 03 Jan 2011 20:54:29 GMT\"},nil,nil}}},{\"server\",{\"Server\",\"Membase Server 1.6.5r\"},nil,{\"transfer-encoding\",{\"Transfer-Encoding\",\"chunked\"},nil,nil}}}}}\ny(3) {mochiweb_request,#Port<0.3418>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},{3,{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept\",{'Accept',\"*/*\"},nil,nil},{\"host\",{'Host',\"127.0.0.1:8091\"},nil,nil}}}}\ny(4) #Fun\n\n0x06133d4c Return addr 0x04443be8 (mochiweb_http:headers/5 + 680)\ny(0) []\ny(1) []\ny(2) []\ny(3) []\ny(4) {mochiweb_request,#Port<0.3418>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},{3,{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept\",{'Accept',\"*/*\"},nil,nil},{\"host\",{'Host',\"127.0.0.1:8091\"},nil,nil}}}}\ny(5) Catch 0x04437308 (menelaus_web:loop/3 + 12152)\n\n0x06133d68 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) #Fun\ny(1) []\ny(2) []\ny(3) {mochiweb_request,#Port<0.3418>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},{3,{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept\",{'Accept',\"*/*\"},nil,nil},{\"host\",{'Host',\"127.0.0.1:8091\"},nil,nil}}}}\n\n0x06133d7c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,120}]}, {heap_size,121393}, {total_heap_size,317811}, {links,[<0.96.0>,#Port<0.3418>]}, {memory,1271780}, {message_queue_len,0}, {reductions,13817997}, {trap_exit,false}]}, {<0.259.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f5a730 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) stats_collector\ny(3) {state,\"default\",[6871627861,213398232,0,0,0,0,4548601,0,0,0,0,0,0,0,0,820134,5193971,2920982,854355,0,0,0,813878,1250498,0],37,1294091287748}\ny(4) <0.259.0>\ny(5) <0.150.0>\n\n0x00f5a74c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, 
[{fullsweep_after,65535},{minor_gcs,18}]}, {heap_size,6765}, {total_heap_size,17711}, {links,[<0.150.0>,<0.106.0>]}, {memory,71300}, {message_queue_len,0}, {reductions,23051866}, {trap_exit,false}]}, {<0.260.0>, [{registered_name,'ns_vbm_sup-default'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x06a0d1b8 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,'ns_vbm_sup-default'},one_for_one,[{child,<0.11394.0>,{child_id,[1023,1022,1021,1020,1019,1018,1017,1016,1015,1014,1013,1012,1011,1010,1009,1008,1007,1006,1005,1004,1003,1002,1001,1000,999,998,997,996,995,994,993,992,991,990,989,988,987,986,985,984,983,982,981,980,979,978,977,976,975,974,973,972,971,970,969,968,967,966,965,964,963,962,961,960,959,958,957,956,955,954,953,952,951,950,949,948,947,946,945,944,943,942,941,940,939,938,937,936,935,934,933,932,931,930,929,928,927,926,925,924,923,922,921,920,919,918,917,916,915,914,913,912,911,910,909,908,907,906,905,904,903,902,901,900,899,898,897,896,895,894,893,892,891,890,889,888,887,886,885,884,883,882,881,880,879,878,877,876,875,874,873,872,871,870,869,868,867,866,865,864,863,862,861,860,859,858,857,856,855,854,853,852,851,850,849,848,847,846,845,844,843,842,841,840,839,838,837,836,835,834,833,832,831,830,829,828,827,826,825,824,823,822,821,820,819,818,817,816,815,814,813,812,811,810,809,808,807,806,805,804,803,802,801,800,799,798,797,796,795,794,793,792,791,790,789,788,787,786,785,784,783,782,781,780,779,778,777,776,775,774,773,772,771,770,769,768,767,766,765,764,763,762,761,760,759,758,757,756,755,754,753,752,751,750,749,748,747,746,745,744,743,742,741,740,739,738,737,736,735,734,733,732,731,730,729,728,727,726,725,724,723,722,721,720,719,718,717,716,715,714,713,712,711,710,709,708,707,706,705,704,703,702,701,700,699,698,697,696,695,694,693,692,691,690,689,688,687,686,685,684,683],'ns_1@10.2.1.101'},{ns_port_server,start_link,[vbucketmigrator,\"./bin/vbucketmigrator/vbucketmigrator\",[\"-e\",\"-a\",\"default\",\"-h\",\"10.2.1.100:11210\",\"-d\",\"10.2.1.101:11210\",\"-A\",\"-v\",\"-b\",\"1023\",\"-b\",\"1022\",\"-b\",\"1021\",\"-b\",\"1020\",\"-b\",\"1019\",\"-b\",\"1018\",\"-b\",\"1017\",\"-b\",\"1016\",\"-b\",\"1015\",\"-b\",\"1014\",\"-b\",\"1013\",\"-b\",\"1012\",\"-b\",\"1011\",\"-b\",\"1010\",\"-b\",\"1009\",\"-b\",\"1008\",\"-b\",\"1007\",\"-b\",\"1006\",\"-b\",\"1005\",\"-b\",\"1004\",\"-b\",\"1003\",\"-b\",\"1002\",\"-b\",\"1001\",\"-b\",\"1000\",\"-b\",\"999\",\"-b\",\"998\",\"-b\",\"997\",\"-b\",\"996\",\"-b\",\"995\",\"-b\",\"994\",\"-b\",\"993\",\"-b\",\"992\",\"-b\",\"991\",\"-b\",\"990\",\"-b\",\"989\",\"-b\",\"988\",\"-b\",\"987\",\"-b\",\"986\",\"-b\",\"985\",\"-b\",\"984\",\"-b\",\"983\",\"-b\",\"982\",\"-b\",\"981\",\"-b\",\"980\",\"-b\",\"979\",\"-b\",\"978\",\"-b\",\"977\",\"-b\",\"976\",\"-b\",\"975\",\"-b\",\"974\",\"-b\",\"973\",\"-b\",\"972\",\"-b\",\"971\",\"-b\",\"970\",\"-b\",\"969\",\"-b\",\"968\",\"-b\",\"967\",\"-b\",\"966\",\"-b\",\"965\",\"-b\",\"964\",\"-b\",\"963\",\"-b\",\"962\",\"-b\",\"961\",\"-b\",\"960\",\"-b\",\"959\",\"-b\",\"958\",\"-b\",\"957\",\"-b\",\"956\",\"-b\",\"955\",\"-b\",\"954\",\"-b\",\"953\",\"-b\",\"952\",\"-b\",\"951\",\"-b\",\"950\",\"-b\",\"949\",\"-b\",\"948\",\"-b\",\"947\",\"-b\",\"946\",\"-b\",\"945\",\"-b\",\"944\",\"-b\",\"943\",\"-b\",\"942\",\"-b\",\"941\",\"-b\",\"940\",\"-b\",\"939\",\"-b\",\"938\",\"-b\",\"937\",\"-b\",\"936\",\"-b\",\"9
35\",\"-b\",\"934\",\"-b\",\"933\",\"-b\",\"932\",\"-b\",\"931\",\"-b\",\"930\",\"-b\",\"929\",\"-b\",\"928\",\"-b\",\"927\",\"-b\",\"926\",\"-b\",\"925\",\"-b\",\"924\",\"-b\",\"923\",\"-b\",\"922\",\"-b\",\"921\",\"-b\",\"920\",\"-b\",\"919\",\"-b\",\"918\",\"-b\",\"917\",\"-b\",\"916\",\"-b\",\"915\",\"-b\",\"914\",\"-b\",\"913\",\"-b\",\"912\",\"-b\",\"911\",\"-b\",\"910\",\"-b\",\"909\",\"-b\",\"908\",\"-b\",\"907\",\"-b\",\"906\",\"-b\",\"905\",\"-b\",\"904\",\"-b\",\"903\",\"-b\",\"902\",\"-b\",\"901\",\"-b\",\"900\",\"-b\",\"899\",\"-b\",\"898\",\"-b\",\"897\",\"-b\",\"896\",\"-b\",\"895\",\"-b\",\"894\",\"-b\",\"893\",\"-b\",\"892\",\"-b\",\"891\",\"-b\",\"890\",\"-b\",\"889\",\"-b\",\"888\",\"-b\",\"887\",\"-b\",\"886\",\"-b\",\"885\",\"-b\",\"884\",\"-b\",\"883\",\"-b\",\"882\",\"-b\",\"881\",\"-b\",\"880\",\"-b\",\"879\",\"-b\",\"878\",\"-b\",\"877\",\"-b\",\"876\",\"-b\",\"875\",\"-b\",\"874\",\"-b\",\"873\",\"-b\",\"872\",\"-b\",\"871\",\"-b\",\"870\",\"-b\",\"869\",\"-b\",\"868\",\"-b\",\"867\",\"-b\",\"866\",\"-b\",\"865\",\"-b\",\"864\",\"-b\",\"863\",\"-b\",\"862\",\"-b\",\"861\",\"-b\",\"860\",\"-b\",\"859\",\"-b\",\"858\",\"-b\",\"857\",\"-b\",\"856\",\"-b\",\"855\",\"-b\",\"854\",\"-b\",\"853\",\"-b\",\"852\",\"-b\",\"851\",\"-b\",\"850\",\"-b\",\"849\",\"-b\",\"848\",\"-b\",\"847\",\"-b\",\"846\",\"-b\",\"845\",\"-b\",\"844\",\"-b\",\"843\",\"-b\",\"842\",\"-b\",\"841\",\"-b\",\"840\",\"-b\",\"839\",\"-b\",\"838\",\"-b\",\"837\",\"-b\",\"836\",\"-b\",\"835\",\"-b\",\"834\",\"-b\",\"833\",\"-b\",\"832\",\"-b\",\"831\",\"-b\",\"830\",\"-b\",\"829\",\"-b\",\"828\",\"-b\",\"827\",\"-b\",\"826\",\"-b\",\"825\",\"-b\",\"824\",\"-b\",\"823\",\"-b\",\"822\",\"-b\",\"821\",\"-b\",\"820\",\"-b\",\"819\",\"-b\",\"818\",\"-b\",\"817\",\"-b\",\"816\",\"-b\",\"815\",\"-b\",\"814\",\"-b\",\"813\",\"-b\",\"812\",\"-b\",\"811\",\"-b\",\"810\",\"-b\",\"809\",\"-b\",\"808\",\"-b\",\"807\",\"-b\",\"806\",\"-b\",\"805\",\"-b\",\"804\",\"-b\",\"803\",\"-b\",\"802\",\"-b\",\"801\",\"-b\",\"800\",\"-b\",\"799\",\"-b\",\"798\",\"-b\",\"797\",\"-b\",\"796\",\"-b\",\"795\",\"-b\",\"794\",\"-b\",\"793\",\"-b\",\"792\",\"-b\",\"791\",\"-b\",\"790\",\"-b\",\"789\",\"-b\",\"788\",\"-b\",\"787\",\"-b\",\"786\",\"-b\",\"785\",\"-b\",\"784\",\"-b\",\"783\",\"-b\",\"782\",\"-b\",\"781\",\"-b\",\"780\",\"-b\",\"779\",\"-b\",\"778\",\"-b\",\"777\",\"-b\",\"776\",\"-b\",\"775\",\"-b\",\"774\",\"-b\",\"773\",\"-b\",\"772\",\"-b\",\"771\",\"-b\",\"770\",\"-b\",\"769\",\"-b\",\"768\",\"-b\",\"767\",\"-b\",\"766\",\"-b\",\"765\",\"-b\",\"764\",\"-b\",\"763\",\"-b\",\"762\",\"-b\",\"761\",\"-b\",\"760\",\"-b\",\"759\",\"-b\",\"758\",\"-b\",\"757\",\"-b\",\"756\",\"-b\",\"755\",\"-b\",\"754\",\"-b\",\"753\",\"-b\",\"752\",\"-b\",\"751\",\"-b\",\"750\",\"-b\",\"749\",\"-b\",\"748\",\"-b\",\"747\",\"-b\",\"746\",\"-b\",\"745\",\"-b\",\"744\",\"-b\",\"743\",\"-b\",\"742\",\"-b\",\"741\",\"-b\",\"740\",\"-b\",\"739\",\"-b\",\"738\",\"-b\",\"737\",\"-b\",\"736\",\"-b\",\"735\",\"-b\",\"734\",\"-b\",\"733\",\"-b\",\"732\",\"-b\",\"731\",\"-b\",\"730\",\"-b\",\"729\",\"-b\",\"728\",\"-b\",\"727\",\"-b\",\"726\",\"-b\",\"725\",\"-b\",\"724\",\"-b\",\"723\",\"-b\",\"722\",\"-b\",\"721\",\"-b\",\"720\",\"-b\",\"719\",\"-b\",\"718\",\"-b\",\"717\",\"-b\",\"716\",\"-b\",\"715\",\"-b\",\"714\",\"-b\",\"713\",\"-b\",\"712\",\"-b\",\"711\",\"-b\",\"710\",\"-b\",\"709\",\"-b\",\"708\",\"-b\",\"707\",\"-b\",\"706\",\"-b\",\"705\",\"-b\",\"704\",\"-b\",\"703\",\"-b\",\"702\",\"-b\",\"701\",\"-b\",\"700\",\"-b\",\"699\",\"-b\",\"6
98\",\"-b\",\"697\",\"-b\",\"696\",\"-b\",\"695\",\"-b\",\"694\",\"-b\",\"693\",\"-b\",\"692\",\"-b\",\"691\",\"-b\",\"690\",\"-b\",\"689\",\"-b\",\"688\",\"-b\",\"687\",\"-b\",\"686\",\"-b\",\"685\",\"-b\",\"684\",\"-b\",\"683\"],[use_stdio,stderr_to_stdout,{write_data,[[],\"\\n\"]}]]},permanent,10,worker,[ns_port_server]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_vbm_sup,[]}\ny(4) 'ns_vbm_sup-default'\ny(5) <0.109.0>\n\n0x06a0d1d4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,28657}, {total_heap_size,57314}, {links,[<0.109.0>,<0.11394.0>]}, {memory,229712}, {message_queue_len,0}, {reductions,7803}, {trap_exit,true}]}, {<0.262.0>, [{registered_name,'stats_archiver-default'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f44190 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) stats_archiver\ny(3) {state,\"default\"}\ny(4) 'stats_archiver-default'\ny(5) <0.150.0>\n\n0x00f441ac Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,50}]}, {heap_size,4181}, {total_heap_size,15127}, {links,[<0.107.0>,<0.150.0>,<0.57.0>]}, {memory,60984}, {message_queue_len,0}, {reductions,16813545}, {trap_exit,false}]}, {<0.299.0>, [{registered_name,'stats_reader-default'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x062d65e0 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) stats_reader\ny(3) {state,\"default\"}\ny(4) 'stats_reader-default'\ny(5) <0.150.0>\n\n0x062d65fc Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,108}]}, {heap_size,317811}, {total_heap_size,1664080}, {links,[<0.150.0>]}, {memory,6656756}, {message_queue_len,0}, {reductions,8089605}, {trap_exit,false}]}, {<0.300.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f66f68 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor_cushion\ny(3) {state,moxi,5000,{1294,88069,432400},<0.301.0>}\ny(4) <0.300.0>\ny(5) <0.100.0>\n\n0x00f66f84 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,1597}, {total_heap_size,1597}, {links,[<0.100.0>,<0.301.0>]}, {memory,6844}, {message_queue_len,0}, {reductions,149}, {trap_exit,true}]}, {<0.301.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0467d0a4 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_port_server\ny(3) {state,#Port<0.3417>,moxi,{[\"2011-01-03 12:54:29: (cproxy_config.c.325) env: 
MOXI_SASL_PLAIN_PWD (7)\",\"2011-01-03 12:54:29: (cproxy_config.c.316) env: MOXI_SASL_PLAIN_USR (13)\"],[empty]},undefined,[],0}\ny(4) <0.301.0>\ny(5) <0.300.0>\n\n0x0467d0c0 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,2584}, {total_heap_size,5168}, {links,[<0.300.0>,#Port<0.3417>]}, {memory,21128}, {message_queue_len,0}, {reductions,305}, {trap_exit,true}]}, {<0.322.0>, [{registered_name,'ns_memcached-default'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x058fbda4 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_memcached\ny(3) {state,\"default\",#Port<0.4370>}\ny(4) <0.322.0>\ny(5) <0.109.0>\n\n0x058fbdc0 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,126}]}, {heap_size,10946}, {total_heap_size,85971}, {links, [#Port<0.4329>,#Port<0.4363>,<0.57.0>,<0.109.0>, #Port<0.4370>,#Port<0.4348>,#Port<0.4356>, #Port<0.4340>,#Port<0.3448>,#Port<0.3461>, #Port<0.4315>,#Port<0.3454>,#Port<0.3433>, #Port<0.3441>,#Port<0.3429>]}, {memory,344600}, {message_queue_len,0}, {reductions,105919253}, {trap_exit,true}]}, {<0.427.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x032afdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04b18f1c Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.428.0>,{appl_data,inets,[inets_sup,httpc_manager],undefined,{inets_app,[]},[inets,inets_sup,inets_app,inets_service,inets_regexp,ftp,ftp_progress,ftp_response,ftp_sup,http,httpc_handler,httpc_handler_sup,httpc_manager,httpc_profile_sup,httpc_request,httpc_response,httpc_sup,http_cookie,http_uri,http_chunk,http_request,http_response,http_transport,http_util,httpd,httpd_acceptor,httpd_acceptor_sup,httpd_cgi,httpd_conf,httpd_esi,httpd_example,httpd_file,httpd_instance_sup,httpd_log,httpd_manager,httpd_misc_sup,httpd_request,httpd_request_handler,httpd_response,httpd_script_env,httpd_socket,httpd_sup,httpd_util,mod_actions,mod_alias,mod_auth,mod_auth_dets,mod_auth_mnesia,mod_auth_plain,mod_auth_server,mod_browser,mod_cgi,mod_dir,mod_disk_log,mod_esi,mod_get,mod_head,mod_htaccess,mod_include,mod_log,mod_range,mod_responsecontrol,mod_security,mod_security_server,mod_trace,tftp,tftp_binary,tftp_engine,tftp_file,tftp_lib,tftp_logger,tftp_sup],[],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x04b18f2c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<0.7.0>,<0.428.0>]}, {memory,1964}, {message_queue_len,0}, {reductions,42}, {trap_exit,true}]}, {<0.428.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032b10fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a905c8 Return addr 0x00bdc194 ()\ny(0) []\ny(1) inets_app\ny(2) <0.429.0>\ny(3) <0.427.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, 
{total_heap_size,233}, {links,[<0.427.0>,<0.429.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,42}, {trap_exit,true}]}, {<0.429.0>, [{registered_name,inets_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04b18920 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,inets_sup},one_for_one,[{child,<0.436.0>,tftp_sup,{tftp_sup,start_link,[[]]},permanent,infinity,supervisor,[tftp_sup]},{child,<0.435.0>,httpd_sup,{httpd_sup,start_link,[[]]},permanent,infinity,supervisor,[httpd_sup]},{child,<0.431.0>,httpc_sup,{httpc_sup,start_link,[[{httpc,{default,only_session_cookies}}]]},permanent,infinity,supervisor,[httpc_sup]},{child,<0.430.0>,ftp_sup,{ftp_sup,start_link,[]},permanent,infinity,supervisor,[ftp_sup]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,3600,[],inets_sup,[]}\ny(4) inets_sup\ny(5) <0.428.0>\n\n0x04b1893c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,4}]}, {heap_size,377}, {total_heap_size,754}, {links, [<0.430.0>,<0.435.0>,<0.436.0>,<0.431.0>,<0.428.0>]}, {memory,3532}, {message_queue_len,0}, {reductions,320}, {trap_exit,true}]}, {<0.430.0>, [{registered_name,ftp_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a90968 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ftp_sup},simple_one_for_one,[{child,undefined,undefined,{ftp,start_link,[]},temporary,4000,worker,[ftp]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,3600,[],ftp_sup,[]}\ny(4) ftp_sup\ny(5) <0.429.0>\n\n0x04a90984 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.429.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,54}, {trap_exit,true}]}, {<0.431.0>, [{registered_name,httpc_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a8fa08 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,httpc_sup},one_for_one,[{child,<0.434.0>,httpc_handler_sup,{httpc_handler_sup,start_link,[]},permanent,infinity,supervisor,[httpc_handler_sup]},{child,<0.432.0>,httpc_profile_sup,{httpc_profile_sup,start_link,[[{httpc,{default,only_session_cookies}}]]},permanent,infinity,supervisor,[httpc_profile_sup]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,3600,[],httpc_sup,[[{httpc,{default,only_session_cookies}}]]}\ny(4) httpc_sup\ny(5) <0.429.0>\n\n0x04a8fa24 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.432.0>,<0.434.0>,<0.429.0>]}, 
{memory,2916}, {message_queue_len,0}, {reductions,175}, {trap_exit,true}]}, {<0.432.0>, [{registered_name,httpc_profile_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a8f658 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,httpc_profile_sup},one_for_one,[{child,<0.433.0>,httpc_manager,{httpc_manager,start_link,[{default,only_session_cookies}]},permanent,4000,worker,[httpc_manager]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,3600,[],httpc_profile_sup,[[{httpc,{default,only_session_cookies}}]]}\ny(4) httpc_profile_sup\ny(5) <0.431.0>\n\n0x04a8f674 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.431.0>,<0.433.0>]}, {memory,1388}, {message_queue_len,0}, {reductions,124}, {trap_exit,true}]}, {<0.433.0>, [{registered_name,httpc_manager}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04b23a24 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) httpc_manager\ny(3) {state,[],307242,{undefined,311339},httpc_manager_session_db,httpc_manager,{options,{undefined,[]},0,2,5,120000,2,disabled,false,inet,default,default}}\ny(4) httpc_manager\ny(5) <0.432.0>\n\n0x04b23a40 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,10946}, {total_heap_size,15127}, {links,[<0.432.0>]}, {memory,60944}, {message_queue_len,0}, {reductions,1086}, {trap_exit,true}]}, {<0.434.0>, [{registered_name,httpc_handler_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x059912e0 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,httpc_handler_sup},simple_one_for_one,[{child,undefined,undefined,{httpc_handler,start_link,[]},temporary,4000,worker,[httpc_handler]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,3600,[],httpc_handler_sup,[]}\ny(4) httpc_handler_sup\ny(5) <0.431.0>\n\n0x059912fc Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,6765}, {total_heap_size,10946}, {links,[<0.431.0>]}, {memory,44220}, {message_queue_len,0}, {reductions,626}, {trap_exit,true}]}, {<0.435.0>, [{registered_name,httpd_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d257d8 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,httpd_sup},one_for_one,[],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,3600,[],httpd_sup,[[]]}\ny(4) 
httpd_sup\ny(5) <0.429.0>\n\n0x00d257f4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.429.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,45}, {trap_exit,true}]}, {<0.436.0>, [{registered_name,tftp_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a90168 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,tftp_sup},one_for_one,[],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,3600,[],tftp_sup,[[]]}\ny(4) tftp_sup\ny(5) <0.429.0>\n\n0x04a90184 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.429.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,46}, {trap_exit,true}]}, {<0.449.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{inet_tcp_dist,do_accept,6}}, {backtrace, <<"Program counter: 0x04458044 (dist_util:con_loop/9 + 72)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04b16c48 Return addr 0x00bdc194 ()\ny(0) []\ny(1) #Fun\ny(2) #Fun\ny(3) {tick,44611,46098,0,4}\ny(4) normal\ny(5) 'ns_1@10.2.1.100'\ny(6) {net_address,{{10,2,1,101},56485},\"10.2.1.101\",tcp,inet}\ny(7) #Port<0.4307>\ny(8) 'ns_1@10.2.1.101'\ny(9) <0.21.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,44}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.21.0>,#Port<0.4307>]}, {memory,2836}, {message_queue_len,0}, {reductions,5145}, {trap_exit,false}]}, {<0.609.0>, [{registered_name,hot_keys_keeper}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0556a91c Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) hot_keys_keeper\ny(3) 
{state,[{\"default\",[{\"Assdfdf88545244\",[{ops,3.267974e-003}]},{\"Assdfdf1304300285\",[{ops,3.267974e-003}]},{\"Assdfdf623240301\",[{ops,3.267974e-003}]},{\"Assdfdf796192375\",[{ops,3.267974e-003}]},{\"Assdfdf331631225\",[{ops,3.267974e-003}]},{\"Assdfdf88251782\",[{ops,3.267974e-003}]},{\"Assdfdf17166682\",[{ops,3.267974e-003}]},{\"Assdfdf1876246878\",[{ops,3.267974e-003}]},{\"Assdfdf475099662\",[{ops,3.267974e-003}]},{\"Assdfdf1397088614\",[{ops,3.267974e-003}]}]}],[{\"default\",[{\"Assdfdf865624520\",[{ops,1.633987e-003}]},{\"Assdfdf278485659\",[{ops,1.633987e-003}]},{\"Assdfdf1776784409\",[{ops,1.633987e-003}]},{\"Assdfdf1397088614\",[{ops,3.267974e-003}]},{\"Assdfdf475099662\",[{ops,3.267974e-003}]},{\"Assdfdf1196676958\",[{ops,1.633987e-003}]},{\"Assdfdf1876246878\",[{ops,3.267974e-003}]},{\"Assdfdf2091060820\",[{ops,1.633987e-003}]},{\"Assdfdf17166682\",[{ops,3.267974e-003}]},{\"Assdfdf746146124\",[{ops,1.633987e-003}]},{\"Assdfdf1421467409\",[{ops,1.633987e-003}]},{\"Assdfdf329315874\",[{ops,1.633987e-003}]},{\"Assdfdf901760466\",[{ops,1.633987e-003}]},{\"Assdfdf2096801123\",[{ops,1.633987e-003}]},{\"Assdfdf496453710\",[{ops,1.633987e-003}]},{\"Assdfdf1884212029\",[{ops,1.633987e-003}]},{\"Assdfdf1256748920\",[{ops,1.633987e-003}]},{\"Assdfdf88251782\",[{ops,3.267974e-003}]},{\"Assdfdf1942823903\",[{ops,1.633987e-003}]},{\"Assdfdf1049590099\",[{ops,1.633987e-003}]},{\"Assdfdf1553449745\",[{ops,1.633987e-003}]},{\"Assdfdf1288059078\",[{ops,1.633987e-003}]},{\"Assdfdf1531852312\",[{ops,1.633987e-003}]},{\"Assdfdf96796545\",[{ops,1.633987e-003}]},{\"Assdfdf856022456\",[{ops,1.633987e-003}]},{\"Assdfdf691235694\",[{ops,1.633987e-003}]},{\"Assdfdf1915600870\",[{ops,1.633987e-003}]},{\"Assdfdf775513949\",[{ops,1.633987e-003}]},{\"Assdfdf1894281501\",[{ops,1.633987e-003}]},{\"Assdfdf875023943\",[{ops,1.633987e-003}]},{\"Assdfdf1423347330\",[{ops,1.633987e-003}]},{\"Assdfdf331631225\",[{ops,3.267974e-003}]},{\"Assdfdf2038973643\",[{ops,1.633987e-003}]},{\"Assdfdf1621039891\",[{ops,1.633987e-003}]},{\"Assdfdf250717573\",[{ops,1.633987e-003}]},{\"Assdfdf1182426048\",[{ops,1.633987e-003}]},{\"Assdfdf907901409\",[{ops,1.633987e-003}]},{\"Assdfdf657894342\",[{ops,1.633987e-003}]},{\"Assdfdf796192375\",[{ops,3.267974e-003}]},{\"Assdfdf1131820786\",[{ops,1.633987e-003}]},{\"Assdfdf1188740443\",[{ops,1.633987e-003}]},{\"Assdfdf527764276\",[{ops,1.633987e-003}]},{\"Assdfdf1529028110\",[{ops,1.633987e-003}]},{\"Assdfdf342957022\",[{ops,1.633987e-003}]},{\"Assdfdf1163741686\",[{ops,1.633987e-003}]},{\"Assdfdf1676499598\",[{ops,1.633987e-003}]},{\"Assdfdf1832624046\",[{ops,1.633987e-003}]},{\"Assdfdf1360711486\",[{ops,1.633987e-003}]},{\"Assdfdf1640362316\",[{ops,1.633987e-003}]},{\"Assdfdf623240301\",[{ops,3.267974e-003}]},{\"Assdfdf820141458\",[{ops,1.633987e-003}]},{\"Assdfdf386610799\",[{ops,1.633987e-003}]},{\"Assdfdf1257706528\",[{ops,1.633987e-003}]},{\"Assdfdf1960707453\",[{ops,1.633987e-003}]},{\"Assdfdf964692568\",[{ops,1.633987e-003}]},{\"Assdfdf1657663131\",[{ops,1.633987e-003}]},{\"Assdfdf705541058\",[{ops,1.633987e-003}]},{\"Assdfdf2107634416\",[{ops,1.633987e-003}]},{\"Assdfdf920006862\",[{ops,1.633987e-003}]},{\"Assdfdf1836473674\",[{ops,1.633987e-003}]},{\"Assdfdf442813570\",[{ops,1.633987e-003}]},{\"Assdfdf484550192\",[{ops,1.633987e-003}]},{\"Assdfdf12411562\",[{ops,1.633987e-003}]},{\"Assdfdf1837129101\",[{ops,1.633987e-003}]},{\"Assdfdf1328138072\",[{ops,1.633987e-003}]},{\"Assdfdf711807030\",[{ops,1.633987e-003}]},{\"Assdfdf1889113592\",[{ops,1.633987e-003}]},{\"Assdfdf21430339
48\",[{ops,1.633987e-003}]},{\"Assdfdf1493126454\",[{ops,1.633987e-003}]},{\"Assdfdf316588978\",[{ops,1.633987e-003}]},{\"Assdfdf404980681\",[{ops,1.633987e-003}]},{\"Assdfdf1773979199\",[{ops,1.633987e-003}]},{\"Assdfdf2091829342\",[{ops,1.633987e-003}]},{\"Assdfdf1841617950\",[{ops,1.633987e-003}]},{\"Assdfdf923255958\",[{ops,1.633987e-003}]},{\"Assdfdf239225763\",[{ops,1.633987e-003}]},{\"Assdfdf1904742182\",[{ops,1.633987e-003}]},{\"Assdfdf872270565\",[{ops,1.633987e-003}]},{\"Assdfdf2033362531\",[{ops,1.633987e-003}]},{\"Assdfdf778853385\",[{ops,1.633987e-003}]},{\"Assdfdf1304300285\",[{ops,3.267974e-003}]},{\"Assdfdf1166283405\",[{ops,1.633987e-003}]},{\"Assdfdf2125043462\",[{ops,1.633987e-003}]},{\"Assdfdf1213409417\",[{ops,1.633987e-003}]},{\"Assdfdf614622801\",[{ops,1.633987e-003}]},{\"Assdfdf1128077525\",[{ops,1.633987e-003}]},{\"Assdfdf1213707798\",[{ops,1.633987e-003}]},{\"Assdfdf1062728305\",[{ops,1.633987e-003}]},{\"Assdfdf1832673100\",[{ops,1.633987e-003}]},{\"Assdfdf1671187005\",[{ops,1.633987e-003}]},{\"Assdfdf1119888593\",[{ops,1.633987e-003}]},{\"Assdfdf1869734233\",[{ops,1.633987e-003}]},{\"Assdfdf1414423573\",[{ops,1.633987e-003}]},{\"Assdfdf1449904452\",[{ops,1.633987e-003}]},{\"Assdfdf430710094\",[{ops,1.633987e-003}]},{\"Assdfdf925545937\",[{ops,1.633987e-003}]},{\"Assdfdf188382281\",[{ops,1.633987e-003}]},{\"Assdfdf88545244\",[{ops,3.267974e-003}]},{\"Assdfdf1566975369\",[{ops,1.633987e-003}]},{\"Assdfdf1634406117\",[{ops,1.633987e-003}]}]}],<0.11998.0>}\ny(4) hot_keys_keeper\ny(5) <0.95.0>\n\n0x0556a938 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,99}]}, {heap_size,46368}, {total_heap_size,75025}, {links,[<0.95.0>,<0.57.0>]}, {memory,300556}, {message_queue_len,0}, {reductions,186062}, {trap_exit,false}]}, {<0.5157.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{inet_tcp_dist,do_accept,6}}, {backtrace, <<"Program counter: 0x04458044 (dist_util:con_loop/9 + 72)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04a8fdb0 Return addr 0x00bdc194 ()\ny(0) []\ny(1) #Fun\ny(2) #Fun\ny(3) {tick,9218,20968,2,2}\ny(4) normal\ny(5) 'ns_1@10.2.1.100'\ny(6) {net_address,{{10,2,1,102},56115},\"10.2.1.102\",tcp,inet}\ny(7) #Port<0.5832>\ny(8) 'ns_1@10.2.1.102'\ny(9) <0.21.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,14}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.21.0>,#Port<0.5832>]}, {memory,2836}, {message_queue_len,0}, {reductions,3719}, {trap_exit,false}]}, {<0.10782.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x04437d3c (menelaus_web:handle_pool_info_wait/6 + 140)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05597d70 Return addr 0x044372c0 (menelaus_web:loop/3 + 12080)\ny(0) \"29164773\"\ny(1) \"29164773\"\ny(2) 3000\ny(3) \"127.0.0.1\"\ny(4) {\"Administrator\",\"j4958ph\"}\ny(5) \"default\"\ny(6) {mochiweb_request,#Port<0.6713>,'GET',\"/pools/default?waitChange=3000&etag=29164773\",{1,1},{15,{\"host\",{'Host',\"localhost:8091\"},{\"accept\",{'Accept',\"application/json, text/javascript, */*\"},nil,{\"accept-language\",{'Accept-Language',\"en-us,en;q=0.5\"},{\"accept-encoding\",{'Accept-Encoding',\"gzip,deflate\"},{\"accept-charset\",{'Accept-Charset',\"ISO-8859-1,utf-8;q=0.7,*;q=0.7\"},nil,nil},nil},{\"connection\",{'Connection',\"keep-alive\"},{\"authorization\",{'Authorization',\"Basic 
QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},nil,{\"cache-control\",{'Cache-Control',\"no-cache\"},nil,nil}},{\"cookie\",{'Cookie',\"auth=QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},nil,nil}}}},{\"user-agent\",{'User-Agent',\"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13\"},{\"keep-alive\",{'Keep-Alive',\"115\"},{\"invalid-auth-response\",{\"invalid-auth-response\",\"on\"},nil,nil},{\"pragma\",{'Pragma',\"no-cache\"},nil,{\"referer\",{'Referer',\"http://localhost:8091/index.html\"},nil,nil}}},{\"x-requested-with\",{\"X-Requested-With\",\"XMLHttpRequest\"},nil,nil}}}}}\n\n0x05597d90 Return addr 0x04443be8 (mochiweb_http:headers/5 + 680)\ny(0) []\ny(1) []\ny(2) []\ny(3) []\ny(4) {mochiweb_request,#Port<0.6713>,'GET',\"/pools/default?waitChange=3000&etag=29164773\",{1,1},{15,{\"host\",{'Host',\"localhost:8091\"},{\"accept\",{'Accept',\"application/json, text/javascript, */*\"},nil,{\"accept-language\",{'Accept-Language',\"en-us,en;q=0.5\"},{\"accept-encoding\",{'Accept-Encoding',\"gzip,deflate\"},{\"accept-charset\",{'Accept-Charset',\"ISO-8859-1,utf-8;q=0.7,*;q=0.7\"},nil,nil},nil},{\"connection\",{'Connection',\"keep-alive\"},{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},nil,{\"cache-control\",{'Cache-Control',\"no-cache\"},nil,nil}},{\"cookie\",{'Cookie',\"auth=QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},nil,nil}}}},{\"user-agent\",{'User-Agent',\"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13\"},{\"keep-alive\",{'Keep-Alive',\"115\"},{\"invalid-auth-response\",{\"invalid-auth-response\",\"on\"},nil,nil},{\"pragma\",{'Pragma',\"no-cache\"},nil,{\"referer\",{'Referer',\"http://localhost:8091/index.html\"},nil,nil}}},{\"x-requested-with\",{\"X-Requested-With\",\"XMLHttpRequest\"},nil,nil}}}}}\ny(5) Catch 0x04437308 (menelaus_web:loop/3 + 12152)\n\n0x05597dac Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) #Fun\ny(1) []\ny(2) []\ny(3) {mochiweb_request,#Port<0.6713>,'GET',\"/pools/default?waitChange=3000&etag=29164773\",{1,1},{15,{\"host\",{'Host',\"localhost:8091\"},{\"accept\",{'Accept',\"application/json, text/javascript, */*\"},nil,{\"accept-language\",{'Accept-Language',\"en-us,en;q=0.5\"},{\"accept-encoding\",{'Accept-Encoding',\"gzip,deflate\"},{\"accept-charset\",{'Accept-Charset',\"ISO-8859-1,utf-8;q=0.7,*;q=0.7\"},nil,nil},nil},{\"connection\",{'Connection',\"keep-alive\"},{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},nil,{\"cache-control\",{'Cache-Control',\"no-cache\"},nil,nil}},{\"cookie\",{'Cookie',\"auth=QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},nil,nil}}}},{\"user-agent\",{'User-Agent',\"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13\"},{\"keep-alive\",{'Keep-Alive',\"115\"},{\"invalid-auth-response\",{\"invalid-auth-response\",\"on\"},nil,nil},{\"pragma\",{'Pragma',\"no-cache\"},nil,{\"referer\",{'Referer',\"http://localhost:8091/index.html\"},nil,nil}}},{\"x-requested-with\",{\"X-Requested-With\",\"XMLHttpRequest\"},nil,nil}}}}}\n\n0x05597dc0 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,46368}, {total_heap_size,50549}, {links,[<0.96.0>,#Port<0.6713>]}, {memory,202732}, {message_queue_len,0}, {reductions,7780421}, {trap_exit,false}]}, {<0.10944.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x04437d3c 
(menelaus_web:handle_pool_info_wait/6 + 140)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0553d460 Return addr 0x044372c0 (menelaus_web:loop/3 + 12080)\ny(0) \"29164773\"\ny(1) \"29164773\"\ny(2) 20000\ny(3) \"10.2.1.100\"\ny(4) {\"Administrator\",\"j4958ph\"}\ny(5) \"default\"\ny(6) {mochiweb_request,#Port<0.6735>,'GET',\"/pools/default?waitChange=20000&etag=29164773\",{1,1},{13,{\"cookie\",{'Cookie',\"auth=QWRtaW5pc3RyYXRvcjpqNDk1OHBo; vs=cmd_get%2Ccmd_set%2Ccurr_connections%2Ccurr_items%2Ccurr_items_tot%2Cdisk_writes%2Cep_cache_miss_rate%2Cep_io_num_read%2Cep_oom_errors%2Cep_replica_resident_items_rate%2Cep_resident_items_rate%2Cep_tmp_oom_errors%2Cep_total_persisted%2Cevictions%2Chit_ratio%2Cmem_used%2Cops\"},{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept-encoding\",{'Accept-Encoding',\"gzip, deflate\"},{\"accept\",{'Accept',\"application/json, text/javascript, */*\"},nil,nil},{\"accept-language\",{'Accept-Language',\"en-us\"},nil,nil}},{\"connection\",{'Connection',\"Keep-Alive\"},{\"cache-control\",{'Cache-Control',\"no-cache\"},nil,nil},nil}},{\"referer\",{'Referer',\"http://10.2.1.100:8091/index.html#sec=log\"},{\"invalid-auth-response\",{\"invalid-auth-response\",\"on\"},{\"host\",{'Host',\"10.2.1.100:8091\"},nil,nil},{\"pragma\",{'Pragma',\"no-cache\"},nil,nil}},{\"x-requested-with\",{\"X-Requested-With\",\"XMLHttpRequest\"},{\"user-agent\",{'User-Agent',\"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; OfficeLiveConnector.1.5; OfficeLivePatch.1.3; Tablet PC 2.0; .NET4.0C; .NET4.0E; Creative AutoUpdate v1.40.01)\"},nil,nil},nil}}}}}\n\n0x0553d480 Return addr 0x04443be8 (mochiweb_http:headers/5 + 680)\ny(0) []\ny(1) []\ny(2) []\ny(3) []\ny(4) {mochiweb_request,#Port<0.6735>,'GET',\"/pools/default?waitChange=20000&etag=29164773\",{1,1},{13,{\"cookie\",{'Cookie',\"auth=QWRtaW5pc3RyYXRvcjpqNDk1OHBo; vs=cmd_get%2Ccmd_set%2Ccurr_connections%2Ccurr_items%2Ccurr_items_tot%2Cdisk_writes%2Cep_cache_miss_rate%2Cep_io_num_read%2Cep_oom_errors%2Cep_replica_resident_items_rate%2Cep_resident_items_rate%2Cep_tmp_oom_errors%2Cep_total_persisted%2Cevictions%2Chit_ratio%2Cmem_used%2Cops\"},{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept-encoding\",{'Accept-Encoding',\"gzip, deflate\"},{\"accept\",{'Accept',\"application/json, text/javascript, */*\"},nil,nil},{\"accept-language\",{'Accept-Language',\"en-us\"},nil,nil}},{\"connection\",{'Connection',\"Keep-Alive\"},{\"cache-control\",{'Cache-Control',\"no-cache\"},nil,nil},nil}},{\"referer\",{'Referer',\"http://10.2.1.100:8091/index.html#sec=log\"},{\"invalid-auth-response\",{\"invalid-auth-response\",\"on\"},{\"host\",{'Host',\"10.2.1.100:8091\"},nil,nil},{\"pragma\",{'Pragma',\"no-cache\"},nil,nil}},{\"x-requested-with\",{\"X-Requested-With\",\"XMLHttpRequest\"},{\"user-agent\",{'User-Agent',\"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; OfficeLiveConnector.1.5; OfficeLivePatch.1.3; Tablet PC 2.0; .NET4.0C; .NET4.0E; Creative AutoUpdate v1.40.01)\"},nil,nil},nil}}}}}\ny(5) Catch 0x04437308 (menelaus_web:loop/3 + 12152)\n\n0x0553d49c Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) #Fun\ny(1) []\ny(2) []\ny(3) 
{mochiweb_request,#Port<0.6735>,'GET',\"/pools/default?waitChange=20000&etag=29164773\",{1,1},{13,{\"cookie\",{'Cookie',\"auth=QWRtaW5pc3RyYXRvcjpqNDk1OHBo; vs=cmd_get%2Ccmd_set%2Ccurr_connections%2Ccurr_items%2Ccurr_items_tot%2Cdisk_writes%2Cep_cache_miss_rate%2Cep_io_num_read%2Cep_oom_errors%2Cep_replica_resident_items_rate%2Cep_resident_items_rate%2Cep_tmp_oom_errors%2Cep_total_persisted%2Cevictions%2Chit_ratio%2Cmem_used%2Cops\"},{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept-encoding\",{'Accept-Encoding',\"gzip, deflate\"},{\"accept\",{'Accept',\"application/json, text/javascript, */*\"},nil,nil},{\"accept-language\",{'Accept-Language',\"en-us\"},nil,nil}},{\"connection\",{'Connection',\"Keep-Alive\"},{\"cache-control\",{'Cache-Control',\"no-cache\"},nil,nil},nil}},{\"referer\",{'Referer',\"http://10.2.1.100:8091/index.html#sec=log\"},{\"invalid-auth-response\",{\"invalid-auth-response\",\"on\"},{\"host\",{'Host',\"10.2.1.100:8091\"},nil,nil},{\"pragma\",{'Pragma',\"no-cache\"},nil,nil}},{\"x-requested-with\",{\"X-Requested-With\",\"XMLHttpRequest\"},{\"user-agent\",{'User-Agent',\"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; OfficeLiveConnector.1.5; OfficeLivePatch.1.3; Tablet PC 2.0; .NET4.0C; .NET4.0E; Creative AutoUpdate v1.40.01)\"},nil,nil},nil}}}}}\n\n0x0553d4b0 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,15}]}, {heap_size,46368}, {total_heap_size,121393}, {links,[<0.96.0>,#Port<0.6735>]}, {memory,486108}, {message_queue_len,0}, {reductions,971264}, {trap_exit,false}]}, {<0.11252.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00ee9d08 (prim_inet:recv0/3 + 112)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0588542c Return addr 0x0444377c (mochiweb_http:request/2 + 60)\ny(0) 60022\ny(1) #Port<0.6771>\n\n0x05885438 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) #Fun\ny(1) #Port<0.6771>\n\n0x05885444 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,28657}, {total_heap_size,103682}, {links,[<0.96.0>,#Port<0.6771>]}, {memory,415184}, {message_queue_len,0}, {reductions,699866}, {trap_exit,false}]}, {<0.11394.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03279e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04ac5150 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_port_server\ny(3) {state,#Port<0.6779>,vbucketmigrator,{[\"Authenticated towards: {Sock 10.2.1.100:11210}\",\"Authenticating towards: {Sock 10.2.1.100:11210}\"],[\"Connecting to {Sock 10.2.1.100:11210}\"]},undefined,[],0}\ny(4) <0.11394.0>\ny(5) <0.260.0>\n\n0x04ac516c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,6765}, {total_heap_size,6765}, {links,[<0.260.0>,#Port<0.6779>]}, {memory,27516}, {message_queue_len,0}, {reductions,243}, {trap_exit,true}]}, {<0.11987.0>, [{registered_name,[]}, {status,waiting}, 
{initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0327a32c (gen_server:do_multi_call/4 + 384)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04aad27c Return addr 0x032c99e8 (rpc:do_multicall/5 + 160)\ny(0) []\ny(1) #Ref<0.0.0.246340>\ny(2) <0.12006.0>\ny(3) #Ref<0.0.0.246339>\n\n0x04aad290 Return addr 0x04c09d14 (diag_handler:diag_multicall/3 + 116)\ny(0) []\ny(1) []\n\n0x04aad29c Return addr 0x04c0a324 (diag_handler:handle_diag/1 + 192)\ny(0) ['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']\ny(1) []\ny(2) []\ny(3) []\n\n0x04aad2b0 Return addr 0x0443704c (menelaus_web:loop/3 + 11452)\ny(0) []\ny(1) [[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"12\",58,\"53\",58,[\"0\",\"8\"],46,\"384\"],32,\"ns_node_disco\",58,\"3\",58,\"info\",58,\"cookie update\",32,45,32,\"Initial otp cookie generated: pmqchiglstnppkwf\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"12\",58,\"53\",58,[\"0\",\"8\"],46,\"430\"],32,\"menelaus_app\",58,\"1\",58,\"info\",58,\"web start ok\",32,45,32,\"Membase Server has started on web port 8091 on node 'ns_1@10.2.1.100'.\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"12\",58,\"54\",58,\"23\",46,\"489\"],32,\"menelaus_web\",58,\"12\",58,\"info\",58,\"message\",32,45,32,\"Created bucket \\\"default\\\" of type: membase\\n\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"12\",58,\"55\",58,[\"0\",\"8\"],46,\"136\"],32,\"ns_node_disco\",58,\"4\",58,\"info\",58,\"node up\",32,45,32,\"Node 'ns_1@10.2.1.100' saw that node 'ns_1@10.2.1.101' came up.\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"12\",58,\"55\",58,[\"0\",\"8\"],46,\"383\"],32,\"menelaus_app\",58,\"1\",58,\"info\",58,\"web start ok\",32,45,32,\"Membase Server has started on web port 8091 on node 'ns_1@10.2.1.101'.\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"12\",58,\"55\",58,[\"0\",\"8\"],46,\"695\"],32,\"ns_cluster\",58,\"3\",58,\"info\",58,\"message\",32,45,32,\"Node ns_1@10.2.1.101 joined cluster\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"12\",58,\"55\",58,\"15\",46,\"374\"],32,\"ns_orchestrator\",58,\"4\",58,\"info\",58,\"message\",32,45,32,\"Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101'], EjectNodes = []\\n\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"12\",58,\"55\",58,\"21\",46,\"458\"],32,\"ns_orchestrator\",58,\"2\",58,\"info\",58,\"message\",32,45,32,\"Rebalance exited with reason wait_for_memcached_failed\\n\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"12\",58,\"55\",58,\"51\",46,[\"0\",\"83\"]],32,\"ns_memcached\",58,\"1\",58,\"info\",58,\"message\",32,45,32,\"Bucket \\\"default\\\" loaded on node 'ns_1@10.2.1.100' in 1 seconds.\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"12\",58,\"56\",58,\"38\",46,\"567\"],32,\"ns_memcached\",58,\"1\",58,\"info\",58,\"message\",32,45,32,\"Bucket \\\"default\\\" loaded on node 'ns_1@10.2.1.101' in 1 seconds.\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"12\",58,\"56\",58,\"46\",46,\"713\"],32,\"ns_orchestrator\",58,\"1\",58,\"info\",58,\"message\",32,45,32,\"Rebalance completed successfully.\\n\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,[\"0\",\"1\"],58,[\"0\",\"8\"],46,\"373\"],32,\"ns_orchestrator\",58,\"4\",58,\"info\",58,\"message\",32,45,32,\"Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101'], EjectNodes = []\\n (repeated 1 
times)\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"11\",58,\"29\",46,\"429\"],32,\"menelaus_app\",58,\"1\",58,\"info\",58,\"web start ok\",32,45,32,\"Membase Server has started on web port 8091 on node 'ns_1@10.2.1.102'.\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"11\",58,\"29\",46,\"585\"],32,\"ns_node_disco\",58,\"4\",58,\"info\",58,\"node up\",32,45,32,\"Node 'ns_1@10.2.1.102' saw that node 'ns_1@10.2.1.101' came up.\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"11\",58,\"29\",46,\"773\"],32,\"ns_cluster\",58,\"3\",58,\"info\",58,\"message\",32,45,32,\"Node ns_1@10.2.1.102 joined cluster\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"11\",58,\"29\",46,\"788\"],32,\"ns_node_disco\",58,\"4\",58,\"info\",58,\"node up\",32,45,32,\"Node 'ns_1@10.2.1.100' saw that node 'ns_1@10.2.1.102' came up.\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"11\",58,\"30\",46,\"304\"],32,\"ns_node_disco\",58,\"4\",58,\"info\",58,\"node up\",32,45,32,\"Node 'ns_1@10.2.1.101' saw that node 'ns_1@10.2.1.102' came up.\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"11\",58,\"32\",46,\"596\"],32,\"ns_orchestrator\",58,\"4\",58,\"info\",58,\"message\",32,45,32,\"Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101',\\n 'ns_1@10.2.1.102'], EjectNodes = []\\n\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"11\",58,\"38\",46,\"696\"],32,\"ns_orchestrator\",58,\"2\",58,\"info\",58,\"message\",32,45,32,\"Rebalance exited with reason wait_for_memcached_failed\\n\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"12\",58,\"56\",46,\"806\"],32,\"ns_memcached\",58,\"1\",58,\"info\",58,\"message\",32,45,32,\"Bucket \\\"default\\\" loaded on node 'ns_1@10.2.1.102' in 1 seconds.\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"17\",58,[\"0\",\"8\"],46,\"372\"],32,\"ns_orchestrator\",58,\"2\",58,\"info\",58,\"message\",32,45,32,\"Rebalance exited with reason wait_for_memcached_failed\\n (repeated 2 times)\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"17\",58,[\"0\",\"8\"],46,\"372\"],32,\"ns_orchestrator\",58,\"4\",58,\"info\",58,\"message\",32,45,32,\"Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101',\\n 'ns_1@10.2.1.102'], EjectNodes = []\\n (repeated 3 times)\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"24\",58,[\"0\",\"2\"],46,\"961\"],32,\"ns_orchestrator\",58,\"2\",58,\"info\",58,\"message\",32,45,32,\"Rebalance exited with reason stopped\\n\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"24\",58,\"21\",46,\"634\"],32,\"ns_orchestrator\",58,\"4\",58,\"info\",58,\"message\",32,45,32,\"Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101',\\n 'ns_1@10.2.1.102'], EjectNodes = []\\n\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"39\",58,\"18\",46,\"702\"],32,\"ns_orchestrator\",58,\"2\",58,\"info\",58,\"message\",32,45,32,\"Rebalance exited with reason stopped\\n\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"39\",58,\"25\",46,[\"0\",\"67\"]],32,\"ns_orchestrator\",58,\"4\",58,\"info\",58,\"message\",32,45,32,\"Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101',\\n 'ns_1@10.2.1.102'], EjectNodes = 
[]\\n\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"39\",58,\"38\",46,\"796\"],32,\"ns_memcached\",58,\"4\",58,\"info\",58,\"message\",32,45,32,\"Control connection to memcached on 'ns_1@10.2.1.101' disconnected: {{badmatch,\\n {error,\\n timeout}},\\n [{mc_client_binary,\\n cmd_binary_vocal_recv,\\n 5},\\n {mc_client_binary,\\n delete_vbucket,\\n 2},\\n {ns_memcached,\\n handle_call,\\n 3},\\n {gen_server,\\n handle_msg,\\n 5},\\n {proc_lib,\\n init_p_do_apply,\\n 3}]}\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"39\",58,\"38\",46,\"858\"],32,\"ns_orchestrator\",58,\"2\",58,\"info\",58,\"message\",32,45,32,\"Rebalance exited with reason {{{badmatch,{error,timeout}},\\n [{mc_client_binary,cmd_binary_vocal_recv,5},\\n {mc_client_binary,delete_vbucket,2},\\n {ns_memcached,handle_call,3},\\n {gen_server,handle_msg,5},\\n {proc_lib,init_p_do_apply,3}]},\\n {gen_server,call,\\n [{'ns_memcached-default','ns_1@10.2.1.101'},\\n {delete_vbucket,633},\\n 30000]}}\\n\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"39\",58,\"41\",46,\"885\"],32,\"ns_memcached\",58,\"1\",58,\"info\",58,\"message\",32,45,32,\"Bucket \\\"default\\\" loaded on node 'ns_1@10.2.1.101' in 0 seconds.\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"40\",58,\"13\",46,[\"0\",\"53\"]],32,\"ns_orchestrator\",58,\"2\",58,\"info\",58,\"message\",32,45,32,\"Rebalance exited with reason {{{badmatch,{error,timeout}},\\n [{mc_client_binary,cmd_binary_vocal_recv,5},\\n {mc_client_binary,delete_vbucket,2},\\n {ns_memcached,handle_call,3},\\n {gen_server,handle_msg,5},\\n {proc_lib,init_p_do_apply,3}]},\\n {gen_server,call,\\n [{'ns_memcached-default','ns_1@10.2.1.101'},\\n {delete_vbucket,65},\\n 30000]}}\\n\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"40\",58,\"35\",46,[\"0\",\"18\"]],32,\"ns_orchestrator\",58,\"2\",58,\"info\",58,\"message\",32,45,32,\"Rebalance exited with reason {{{badmatch,{error,timeout}},\\n [{mc_client_binary,cmd_binary_vocal_recv,5},\\n {mc_client_binary,delete_vbucket,2},\\n {ns_memcached,handle_call,3},\\n {gen_server,handle_msg,5},\\n {proc_lib,init_p_do_apply,3}]},\\n {gen_server,call,\\n [{'ns_memcached-default','ns_1@10.2.1.101'},\\n {delete_vbucket,70},\\n 30000]}}\\n\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"45\",58,[\"0\",\"7\"],46,\"927\"],32,\"ns_memcached\",58,\"1\",58,\"info\",58,\"message\",32,45,32,\"Bucket \\\"default\\\" loaded on node 'ns_1@10.2.1.101' in 0 seconds. 
(repeated 9 times)\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"45\",58,[\"0\",\"7\"],46,\"927\"],32,\"ns_memcached\",58,\"4\",58,\"info\",58,\"message\",32,45,32,\"Control connection to memcached on 'ns_1@10.2.1.101' disconnected: {{badmatch,\\n {error,\\n timeout}},\\n [{mc_client_binary,\\n cmd_binary_vocal_recv,\\n 5},\\n {mc_client_binary,\\n delete_vbucket,\\n 2},\\n {ns_memcached,\\n handle_call,\\n 3},\\n {gen_server,\\n handle_msg,\\n 5},\\n {proc_lib,\\n init_p_do_apply,\\n 3}]} (repeated 9 times)\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"45\",58,[\"0\",\"8\"],46,\"378\"],32,\"ns_orchestrator\",58,\"4\",58,\"info\",58,\"message\",32,45,32,\"Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101',\\n 'ns_1@10.2.1.102'], EjectNodes = []\\n (repeated 2 times)\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"45\",58,\"20\",46,\"672\"],32,\"ns_memcached\",58,\"4\",58,\"info\",58,\"message\",32,45,32,\"Control connection to memcached on 'ns_1@10.2.1.101' disconnected: {{badmatch,\\n {error,\\n timeout}},\\n [{mc_client_binary,\\n cmd_binary_vocal_recv,\\n 5},\\n {mc_client_binary,\\n delete_vbucket,\\n 2},\\n {ns_memcached,\\n handle_call,\\n 3},\\n {gen_server,\\n handle_msg,\\n 5},\\n {proc_lib,\\n init_p_do_apply,\\n 3}]}\",\"\\n\",[\"2011\",45,[\"0\",\"1\"],45,[\"0\",\"3\"],32,\"13\",58,\"45\",58,\"20\",46,\"813\"],32,\"ns_memcached\",58,\"1\",58,\"info\",58,\"message\",32,45,32,\"Bucket \\\"default\\\" loaded on node 'ns_1@10.2.1.101' in 0 seconds.\",\"\\n\"]\ny(2) [{\"default\",[{num_replicas,1},{ram_quota,3426746368},{auth_type,sasl},{sasl_password,[]},{type,membase},{num_vbuckets,1024},{ht_size,3079},{tap_keepalive,0},{tap_noop_interval,20},{max_txn_size,1000},{ht_locks,5},{servers,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{map,[['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1
.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',und
efined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1
.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.
101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100
'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102'
,'ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],[
'ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@1
0.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10
.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.
1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1
.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101']]}]}]\ny(3) {mochiweb_request,#Port<0.6855>,'GET',\"/diag\",{1,1},{8,{\"accept\",{'Accept',\"*/*\"},nil,{\"referer\",{'Referer',\"http://10.2.1.100:8091/index.html\"},{\"accept-language\",{'Accept-Language',\"en-US\"},{\"accept-encoding\",{'Accept-Encoding',\"gzip, deflate\"},nil,nil},{\"host\",{'Host',\"10.2.1.100:8091\"},{\"connection\",{'Connection',\"Keep-Alive\"},nil,{\"cookie\",{'Cookie',\"auth=QWRtaW5pc3RyYXRvcjpqNDk1OHBo; vs=cmd_get%2Ccmd_set%2Ccurr_connections%2Ccurr_items%2Ccurr_items_tot%2Cdisk_writes%2Cep_cache_miss_rate%2Cep_io_num_read%2Cep_oom_errors%2Cep_replica_resident_items_rate%2Cep_resident_items_rate%2Cep_tmp_oom_errors%2Cep_total_persisted%2Cevictions%2Chit_ratio%2Cmem_used%2Cops\"},nil,nil}},nil}},{\"user-agent\",{'User-Agent',\"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; OfficeLiveConnector.1.5; OfficeLivePatch.1.3; Tablet PC 2.0; .NET4.0C; .NET4.0E; Creative AutoUpdate v1.40.01)\"},nil,nil}}}}}\n\n0x04aad2c4 Return addr 0x04443be8 (mochiweb_http:headers/5 + 680)\ny(0) []\ny(1) []\ny(2) []\ny(3) []\ny(4) {mochiweb_request,#Port<0.6855>,'GET',\"/diag\",{1,1},{8,{\"accept\",{'Accept',\"*/*\"},nil,{\"referer\",{'Referer',\"http://10.2.1.100:8091/index.html\"},{\"accept-language\",{'Accept-Language',\"en-US\"},{\"accept-encoding\",{'Accept-Encoding',\"gzip, deflate\"},nil,nil},{\"host\",{'Host',\"10.2.1.100:8091\"},{\"connection\",{'Connection',\"Keep-Alive\"},nil,{\"cookie\",{'Cookie',\"auth=QWRtaW5pc3RyYXRvcjpqNDk1OHBo; vs=cmd_get%2Ccmd_set%2Ccurr_connections%2Ccurr_items%2Ccurr_items_tot%2Cdisk_writes%2Cep_cache_miss_rate%2Cep_io_num_read%2Cep_oom_errors%2Cep_replica_resident_items_rate%2Cep_resident_items_rate%2Cep_tmp_oom_errors%2Cep_total_persisted%2Cevictions%2Chit_ratio%2Cmem_used%2Cops\"},nil,nil}},nil}},{\"user-agent\",{'User-Agent',\"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; OfficeLiveConnector.1.5; OfficeLivePatch.1.3; Tablet PC 2.0; .NET4.0C; .NET4.0E; Creative AutoUpdate v1.40.01)\"},nil,nil}}}}}\ny(5) Catch 0x04437308 (menelaus_web:loop/3 + 12152)\n\n0x04aad2e0 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) #Fun\ny(1) []\ny(2) []\ny(3) {mochiweb_request,#Port<0.6855>,'GET',\"/diag\",{1,1},{8,{\"accept\",{'Accept',\"*/*\"},nil,{\"referer\",{'Referer',\"http://10.2.1.100:8091/index.html\"},{\"accept-language\",{'Accept-Language',\"en-US\"},{\"accept-encoding\",{'Accept-Encoding',\"gzip, deflate\"},nil,nil},{\"host\",{'Host',\"10.2.1.100:8091\"},{\"connection\",{'Connection',\"Keep-Alive\"},nil,{\"cookie\",{'Cookie',\"auth=QWRtaW5pc3RyYXRvcjpqNDk1OHBo; 
vs=cmd_get%2Ccmd_set%2Ccurr_connections%2Ccurr_items%2Ccurr_items_tot%2Cdisk_writes%2Cep_cache_miss_rate%2Cep_io_num_read%2Cep_oom_errors%2Cep_replica_resident_items_rate%2Cep_resident_items_rate%2Cep_tmp_oom_errors%2Cep_total_persisted%2Cevictions%2Chit_ratio%2Cmem_used%2Cops\"},nil,nil}},nil}},{\"user-agent\",{'User-Agent',\"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; OfficeLiveConnector.1.5; OfficeLivePatch.1.3; Tablet PC 2.0; .NET4.0C; .NET4.0E; Creative AutoUpdate v1.40.01)\"},nil,nil}}}}}\n\n0x04aad2f4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,28657}, {total_heap_size,75025}, {links,[<0.96.0>,#Port<0.6855>]}, {memory,300636}, {message_queue_len,0}, {reductions,44256}, {trap_exit,false}]}, {<0.11992.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00efdbb0 (gen:wait_resp_mon/3 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04b7e2ec Return addr 0x03279274 (gen_server:call/3 + 80)\ny(0) 30000\ny(1) #Ref<0.0.0.246225>\ny(2) 'ns_1@10.2.1.101'\n\n0x04b7e2fc Return addr 0x00f148f4 (lists:foreach/2 + 64)\ny(0) 30000\ny(1) {delete_vbucket,41}\ny(2) {'ns_memcached-default','ns_1@10.2.1.101'}\ny(3) Catch 0x03279274 (gen_server:call/3 + 80)\n\n0x04b7e310 Return addr 0x04a52884 (ns_vbm_sup:start_replicas/4 + 160)\ny(0) #Fun\ny(1) [{40,dead},{39,dead},{38,dead},{37,dead},{36,dead},{35,dead},{34,dead},{33,dead},{32,dead},{31,dead},{30,dead},{29,dead},{28,dead},{27,dead},{26,dead},{25,dead},{24,dead},{23,dead},{22,dead},{21,dead},{20,dead},{19,dead},{18,dead},{17,dead},{16,dead},{15,dead},{14,dead},{13,dead},{12,dead},{11,dead},{10,dead},{9,dead},{8,dead},{7,dead},{6,dead},{5,dead},{4,dead},{3,dead},{2,dead},{1,dead},{0,dead}]\n\n0x04b7e31c Return addr 0x00f148f4 (lists:foreach/2 + 64)\ny(0) 'ns_1@10.2.1.102'\ny(1) \"default\"\ny(2) [633,632,629,628,625,624,621,620,617,616,613,612,609,608,605,604,601,600,597,596,593,592,589,588,585,584,581,580,577,576,573,572,569,568,565,564,561,560,557,556,553,552,549,548,545,544,541,540,537,536,533,532,529,528,525,524,521,520,517,516,513,512,68,67,64,63,60,59,56,55,52,51,50,49,48,47,46,45,44,43,42,41,40,39,38,37,36,35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0]\ny(3) 'ns_1@10.2.1.101'\n\n0x04b7e330 Return addr 0x04a53104 (ns_vbm_sup:'-set_replicas/2-fun-1-'/3 + 148)\ny(0) #Fun\ny(1) []\n\n0x04b7e33c Return addr 0x00f148f4 (lists:foreach/2 + 64)\ny(0) Catch 0x04a53114 (ns_vbm_sup:'-set_replicas/2-fun-1-'/3 + 164)\ny(1) \"default\"\ny(2) 
[{634,'ns_1@10.2.1.100'},{633,'ns_1@10.2.1.101'},{632,'ns_1@10.2.1.101'},{631,'ns_1@10.2.1.100'},{630,'ns_1@10.2.1.100'},{629,'ns_1@10.2.1.101'},{628,'ns_1@10.2.1.101'},{627,'ns_1@10.2.1.100'},{626,'ns_1@10.2.1.100'},{625,'ns_1@10.2.1.101'},{624,'ns_1@10.2.1.101'},{623,'ns_1@10.2.1.100'},{622,'ns_1@10.2.1.100'},{621,'ns_1@10.2.1.101'},{620,'ns_1@10.2.1.101'},{619,'ns_1@10.2.1.100'},{618,'ns_1@10.2.1.100'},{617,'ns_1@10.2.1.101'},{616,'ns_1@10.2.1.101'},{615,'ns_1@10.2.1.100'},{614,'ns_1@10.2.1.100'},{613,'ns_1@10.2.1.101'},{612,'ns_1@10.2.1.101'},{611,'ns_1@10.2.1.100'},{610,'ns_1@10.2.1.100'},{609,'ns_1@10.2.1.101'},{608,'ns_1@10.2.1.101'},{607,'ns_1@10.2.1.100'},{606,'ns_1@10.2.1.100'},{605,'ns_1@10.2.1.101'},{604,'ns_1@10.2.1.101'},{603,'ns_1@10.2.1.100'},{602,'ns_1@10.2.1.100'},{601,'ns_1@10.2.1.101'},{600,'ns_1@10.2.1.101'},{599,'ns_1@10.2.1.100'},{598,'ns_1@10.2.1.100'},{597,'ns_1@10.2.1.101'},{596,'ns_1@10.2.1.101'},{595,'ns_1@10.2.1.100'},{594,'ns_1@10.2.1.100'},{593,'ns_1@10.2.1.101'},{592,'ns_1@10.2.1.101'},{591,'ns_1@10.2.1.100'},{590,'ns_1@10.2.1.100'},{589,'ns_1@10.2.1.101'},{588,'ns_1@10.2.1.101'},{587,'ns_1@10.2.1.100'},{586,'ns_1@10.2.1.100'},{585,'ns_1@10.2.1.101'},{584,'ns_1@10.2.1.101'},{583,'ns_1@10.2.1.100'},{582,'ns_1@10.2.1.100'},{581,'ns_1@10.2.1.101'},{580,'ns_1@10.2.1.101'},{579,'ns_1@10.2.1.100'},{578,'ns_1@10.2.1.100'},{577,'ns_1@10.2.1.101'},{576,'ns_1@10.2.1.101'},{575,'ns_1@10.2.1.100'},{574,'ns_1@10.2.1.100'},{573,'ns_1@10.2.1.101'},{572,'ns_1@10.2.1.101'},{571,'ns_1@10.2.1.100'},{570,'ns_1@10.2.1.100'},{569,'ns_1@10.2.1.101'},{568,'ns_1@10.2.1.101'},{567,'ns_1@10.2.1.100'},{566,'ns_1@10.2.1.100'},{565,'ns_1@10.2.1.101'},{564,'ns_1@10.2.1.101'},{563,'ns_1@10.2.1.100'},{562,'ns_1@10.2.1.100'},{561,'ns_1@10.2.1.101'},{560,'ns_1@10.2.1.101'},{559,'ns_1@10.2.1.100'},{558,'ns_1@10.2.1.100'},{557,'ns_1@10.2.1.101'},{556,'ns_1@10.2.1.101'},{555,'ns_1@10.2.1.100'},{554,'ns_1@10.2.1.100'},{553,'ns_1@10.2.1.101'},{552,'ns_1@10.2.1.101'},{551,'ns_1@10.2.1.100'},{550,'ns_1@10.2.1.100'},{549,'ns_1@10.2.1.101'},{548,'ns_1@10.2.1.101'},{547,'ns_1@10.2.1.100'},{546,'ns_1@10.2.1.100'},{545,'ns_1@10.2.1.101'},{544,'ns_1@10.2.1.101'},{543,'ns_1@10.2.1.100'},{542,'ns_1@10.2.1.100'},{541,'ns_1@10.2.1.101'},{540,'ns_1@10.2.1.101'},{539,'ns_1@10.2.1.100'},{538,'ns_1@10.2.1.100'},{537,'ns_1@10.2.1.101'},{536,'ns_1@10.2.1.101'},{535,'ns_1@10.2.1.100'},{534,'ns_1@10.2.1.100'},{533,'ns_1@10.2.1.101'},{532,'ns_1@10.2.1.101'},{531,'ns_1@10.2.1.100'},{530,'ns_1@10.2.1.100'},{529,'ns_1@10.2.1.101'},{528,'ns_1@10.2.1.101'},{527,'ns_1@10.2.1.100'},{526,'ns_1@10.2.1.100'},{525,'ns_1@10.2.1.101'},{524,'ns_1@10.2.1.101'},{523,'ns_1@10.2.1.100'},{522,'ns_1@10.2.1.100'},{521,'ns_1@10.2.1.101'},{520,'ns_1@10.2.1.101'},{519,'ns_1@10.2.1.100'},{518,'ns_1@10.2.1.100'},{517,'ns_1@10.2.1.101'},{516,'ns_1@10.2.1.101'},{515,'ns_1@10.2.1.100'},{514,'ns_1@10.2.1.100'},{513,'ns_1@10.2.1.101'},{512,'ns_1@10.2.1.101'},{70,'ns_1@10.2.1.100'},{69,'ns_1@10.2.1.100'},{68,'ns_1@10.2.1.101'},{67,'ns_1@10.2.1.101'},{66,'ns_1@10.2.1.100'},{65,'ns_1@10.2.1.100'},{64,'ns_1@10.2.1.101'},{63,'ns_1@10.2.1.101'},{62,'ns_1@10.2.1.100'},{61,'ns_1@10.2.1.100'},{60,'ns_1@10.2.1.101'},{59,'ns_1@10.2.1.101'},{58,'ns_1@10.2.1.100'},{57,'ns_1@10.2.1.100'},{56,'ns_1@10.2.1.101'},{55,'ns_1@10.2.1.101'},{54,'ns_1@10.2.1.100'},{53,'ns_1@10.2.1.100'},{52,'ns_1@10.2.1.101'},{51,'ns_1@10.2.1.101'},{50,'ns_1@10.2.1.101'},{49,'ns_1@10.2.1.101'},{48,'ns_1@10.2.1.101'},{47,'ns_1@10.2.1.101'},{46,'ns_1@10.2.1.101'},{45,'ns_1@10.2.1.101'},{44,
'ns_1@10.2.1.101'},{43,'ns_1@10.2.1.101'},{42,'ns_1@10.2.1.101'},{41,'ns_1@10.2.1.101'},{40,'ns_1@10.2.1.101'},{39,'ns_1@10.2.1.101'},{38,'ns_1@10.2.1.101'},{37,'ns_1@10.2.1.101'},{36,'ns_1@10.2.1.101'},{35,'ns_1@10.2.1.101'},{34,'ns_1@10.2.1.101'},{33,'ns_1@10.2.1.101'},{32,'ns_1@10.2.1.101'},{31,'ns_1@10.2.1.101'},{30,'ns_1@10.2.1.101'},{29,'ns_1@10.2.1.101'},{28,'ns_1@10.2.1.101'},{27,'ns_1@10.2.1.101'},{26,'ns_1@10.2.1.101'},{25,'ns_1@10.2.1.101'},{24,'ns_1@10.2.1.101'},{23,'ns_1@10.2.1.101'},{22,'ns_1@10.2.1.101'},{21,'ns_1@10.2.1.101'},{20,'ns_1@10.2.1.101'},{19,'ns_1@10.2.1.101'},{18,'ns_1@10.2.1.101'},{17,'ns_1@10.2.1.101'},{16,'ns_1@10.2.1.101'},{15,'ns_1@10.2.1.101'},{14,'ns_1@10.2.1.101'},{13,'ns_1@10.2.1.101'},{12,'ns_1@10.2.1.101'},{11,'ns_1@10.2.1.101'},{10,'ns_1@10.2.1.101'},{9,'ns_1@10.2.1.101'},{8,'ns_1@10.2.1.101'},{7,'ns_1@10.2.1.101'},{6,'ns_1@10.2.1.101'},{5,'ns_1@10.2.1.101'},{4,'ns_1@10.2.1.101'},{3,'ns_1@10.2.1.101'},{2,'ns_1@10.2.1.101'},{1,'ns_1@10.2.1.101'},{0,'ns_1@10.2.1.101'}]\ny(3) 'ns_1@10.2.1.102'\n\n0x04b7e350 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) #Fun\ny(1) []\n\n0x04b7e35c Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,12}]}, {heap_size,17711}, {total_heap_size,46368}, {links,[<0.110.0>]}, {memory,185948}, {message_queue_len,0}, {reductions,2374556}, {trap_exit,false}]}, {<0.12004.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00ee92d8 (prim_inet:accept0/2 + 92)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04b181b0 Return addr 0x03318894 (inet_tcp:accept/1 + 20)\ny(0) 1529\ny(1) #Port<0.1928>\n\n0x04b181bc Return addr 0x0444af1c (mochiweb_socket_server:acceptor_loop/1 + 80)\ny(0) []\n\n0x04b181c4 Return addr 0x00f0d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) #Fun\ny(1) <0.96.0>\ny(2) Catch 0x0444af1c (mochiweb_socket_server:acceptor_loop/1 + 80)\n\n0x04b181d4 Return addr 0x00bdc194 ()\ny(0) Catch 0x00f0d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<0.96.0>]}, {memory,1984}, {message_queue_len,0}, {reductions,18}, {trap_exit,false}]}, {<0.12006.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x0327a63c (gen_server:rec_nodes/7 + 120)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05b2793c Return addr 0x0327d6d8 (gen_server:'-do_multi_call/4-fun-0-'/6 + 356)\ny(0) #Ref<0.0.0.246345>\ny(1) 2000\ny(2) 
[{'ns_1@10.2.1.102',[{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{manifest,[\"bucket_engine_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r\",\"curl-7.21.1-w64_patched.tar.gz\r\",\"ep-engine_1.6.5r_4_g9d25ede-MINGW32_NT-6.0.i686.tar.gz\r\",\"libconflate_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"libevent-2.0.7-rc.tar.gz\r\",\"libmemcached-0.41_trond-norbye_mingw32-revno895.tar.gz\r\",\"libvbucket_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"membase-cli_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"memcached_1.4.4_359_g06c7d3b-MINGW32_NT-6.0.i686.tar.gz\r\",\"moxi_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"ns_server_1.6.5r.tar.gz\r\",\"pthreads-w64-2-8-0-release.tar.gz\r\",\"vbucketmigrator_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r\",\"wallace_1.6.5r-2-gc6cf01c-win64-201012280140\r\"]},{config,[{{node,'ns_1@10.2.1.101',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.102',memcached},[{port,11210},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{otp,[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{cookie,pmqchiglstnppkwf}]},{memory_quota,3268},{{node,'ns_1@10.2.1.102',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.100',membership},active},{rebalance_status,{none,<<76 bytes>>}},{{node,'ns_1@10.2.1.101',membership},active},{rest_creds,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{creds,[{\"Administrator\",[{password,'filtered-out'}]}]}]},{buckets,[{'_vclock',[{'ns_1@10.2.1.100',{9,63461309965}}]},{configs,[{\"default\",[{num_replicas,1},{ram_quota,3426746368},{auth_type,sasl},{sasl_password,[]},{type,membase},{num_vbuckets,1024},{ht_size,3079},{tap_keepalive,0},{tap_noop_interval,20},{max_txn_size,1000},{ht_locks,5},{servers,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{map,[['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2
.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns
_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10
.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.
1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1
.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.10
0'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102
','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns
_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@
10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@1
0.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2
.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101']]}]}]}]},{port_servers,[{moxi,\"./bin/moxi/moxi\",[\"-Z\",{\"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",[port]},\"-z\",{\"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming\",[{rest,port}]},\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",{\"~s\",[{ns_moxi_sup,rest_user,[]}]}},{\"MOXI_SASL_PLAIN_PWD\",{\"~s\",[{ns_moxi_sup,rest_pass,[]}]}}]},use_stdio,stderr_to_stdout,stream]},{memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\",{\"~B\",[port]},\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",{\"admin=~s;default_bucket_name=default;auto_create=false\",[admin_user]},{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",{\"~s\",[{isasl,path}]}},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]}]},{alerts,[{email,[]},{email_alerts,false},{email_server,[{user,undefined},{pass,'filtered-out'},{addr,undefined},{port,undefined},{encrypt,false}]},{alerts,[server_down,server_unresponsive,server_up,server_joined,server_left,bucket_created,bucket_deleted,bucket_auth_failed]}]},{nodes_wanted,[{'_vclock',[{'ns_1@10.2.1.100',{2,63461308289}}]},'ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{rest,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{port,8091}]},{{node,'ns_1@10.2.1.102',membership},active},{{node,'ns_1@10.2.1.100',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.101',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.102',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{moxi,[{port,11211},{verbosity,[]}]},{replication,[{enabled,true}]},{{node,'ns_1@10.2.1.100',memcached},[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307259}}]},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.101',memcached},[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{dbdir,\"c:/Program 
Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.100',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]}]},{basic_info,[{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{system_arch,\"windows\"},{wall_clock,2218},{memory_data,{4284698624,3351322624,{<10870.307.0>,4114268}}},{disk_data,[{\"C:\\\",49423972,41},{\"D:\\\",52797620,0},{\"G:\\\",34724465,17}]}]},{processes,[{<10870.0.0>,[{registered_name,init},{status,waiting},{initial_call,{otp_ring0,start,2}},{backtrace,<<830 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,91}]},{heap_size,1597},{total_heap_size,3194},{links,[<10870.6.0>,<10870.7.0>,<10870.3.0>]},{memory,13192},{message_queue_len,0},{reductions,27650},{trap_exit,true}]},{<10870.3.0>,[{registered_name,erl_prim_loader},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<620 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,362}]},{heap_size,1597},{total_heap_size,12543},{links,[#Port<10870.1>,<10870.0.0>]},{memory,50568},{message_queue_len,0},{reductions,1020771},{trap_exit,true}]},{<10870.6.0>,[{registered_name,error_logger},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<552 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,4181},{total_heap_size,8362},{links,[<10870.0.0>,<10870.31.0>,#Port<10870.1578>]},{memory,33924},{message_queue_len,0},{reductions,130765},{trap_exit,true}]},{<10870.7.0>,[{registered_name,application_controller},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<530 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,25}]},{heap_size,2584},{total_heap_size,20295},{links,[<10870.50.0>,<10870.59.0>,<10870.237.0>,<10870.9.0>,<10870.39.0>,<10870.0.0>]},{memory,81716},{message_queue_len,0},{reductions,31489},{trap_exit,true}]},{<10870.9.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1414 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,754},{links,[<10870.7.0>,<10870.10.0>]},{memory,3472},{message_queue_len,0},{reductions,44},{trap_exit,true}]},{<10870.10.0>,[{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<228 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.9.0>,<10870.11.0>]},{memory,1328},{message_queue_len,0},{reductions,72},{trap_exit,true}]},{<10870.11.0>,[{registered_name,kernel_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1623 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,4181},{total_heap_size,8362},{links,[<10870.26.0>,<10870.29.0>,<10870.33.0>,<10870.34.0>,<10870.27.0>,<10870.17.0>,<10870.24.0>,<10870.25.0>,<10870.18.0>,<10870.12.0>,<10870.13.0>,<10870.10.0>]},{memory,34104},{message_queue_len,0},{reductions,3131},{trap_exit,true}]},{<10870.12.0>,[{registered_name,rex},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<453 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,187}]},{heap_size,987},{total_heap_size,1364},{links,[<10870.11.0>]},{memory,5996},{message_queue_len,0},{reductions,55620},{trap_exit,true}]},{<10870.13.0>,[{registered_name,global_name_server},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<538 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,8}]},{heap_size,1597},{total_heap_size,1974},{links,[<10870.14.0>,<10870.16.0>,<10870.15.0>,<10870.11.0>]},{memory,8504},{message_queue_len,0},{reductions,2085},{trap_exit,true}]},{<10870.14.0>,[{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<339 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,610},{total_heap_size,987},{links,[<10870.13.0>]},{memory,4324},{message_queue_len,0},{reductions,393},{trap_exit,true}]},{<10870.15.0>,[{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<297 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,377},{total_heap_size,754},{links,[<10870.13.0>]},{memory,3392},{message_queue_len,0},{reductions,232},{trap_exit,false}]},{<10870.16.0>,[{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<176 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,610},{total_heap_size,987},{links,[<10870.13.0>]},{memory,4324},{message_queue_len,0},{reductions,249},{trap_exit,false}]},{<10870.17.0>,[{registered_name,inet_db},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<498 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,17}]},{heap_size,377},{total_heap_size,754},{links,[<10870.11.0>]},{memory,3452},{message_queue_len,0},{reductions,1754},{trap_exit,true}]},{<10870.18.0>,[{registered_name,net_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<870 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,987},{links,[<10870.19.0>,<10870.20.0>,<10870.21.0>,<10870.11.0>]},{memory,4444},{message_queue_len,0},{reductions,265},{trap_exit,true}]},{<10870.19.0>,[{registered_name,erl_epmd},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<409 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.18.0>,#Port<10870.473>]},{memory,1388},{message_queue_len,0},{reductions,135},{trap_exit,false}]},{<10870.20.0>,[{registered_name,auth},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<397 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,610},{total_heap_size,987},{links,[<10870.18.0>]},{memory,4384},{message_queue_len,0},{reductions,397},{trap_exit,true}]},{<10870.21.0>,[{registered_name,net_kernel},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<651 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,10}]},{heap_size,2584},{total_heap_size,2961},{links,[<10870.23.0>,<10870.182.0>,<10870.186.0>,<10870.18.0>,<10870.22.0>,#Port<10870.460>]},{memory,12396},{message_queue_len,0},{reductions,3973},{trap_exit,true}]},{<10870.22.0>,[{registered_name,[]},{status,waiting},{initial_call,{inet_tcp_dist,accept_loop,2}},{backtrace,<<385 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,4}]},{heap_size,377},{total_heap_size,754},{links,[<10870.21.0>]},{memory,3432},{message_queue_len,0},{reductions,789},{trap_exit,false}]},{<10870.23.0>,[{registered_name,[]},{status,waiting},{initial_call,{net_kernel,ticker,2}},{backtrace,<<194 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.21.0>]},{memory,1308},{message_queue_len,0},{reductions,297},{trap_exit,false}]},{<10870.24.0>,[{registered_name,global_group},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<456 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.11.0>]},{memory,1368},{message_queue_len,0},{reductions,76},{trap_exit,true}]},{<10870.25.0>,[{registered_name,file_server_2},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<398 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1396}]},{heap_size,6765},{total_heap_size,13530},{links,[#Port<10870.496>,<10870.11.0>]},{memory,54576},{message_queue_len,0},{reductions,1945575},{trap_exit,true}]},{<10870.26.0>,[{registered_name,code_server},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<2875 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,221}]},{heap_size,4181},{total_heap_size,21892},{links,[<10870.11.0>]},{memory,87944},{message_queue_len,0},{reductions,193766},{trap_exit,true}]},{<10870.27.0>,[{registered_name,standard_error_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<464 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.11.0>,<10870.28.0>]},{memory,1388},{message_queue_len,0},{reductions,40},{trap_exit,true}]},{<10870.28.0>,[{registered_name,standard_error},{status,waiting},{initial_call,{standard_error,server,2}},{backtrace,<<187 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.27.0>,#Port<10870.792>]},{memory,1388},{message_queue_len,0},{reductions,7},{trap_exit,true}]},{<10870.29.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<441 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,1597},{total_heap_size,1597},{links,[<10870.11.0>,<10870.31.0>]},{memory,6844},{message_queue_len,0},{reductions,166},{trap_exit,true}]},{<10870.31.0>,[{registered_name,user},{status,waiting},{initial_call,{user,server,2}},{backtrace,<<728 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,35}]},{heap_size,1597},{total_heap_size,5778},{links,[<10870.29.0>,<10870.32.0>,#Port<10870.830>,<10870.6.0>]},{memory,23648},{message_queue_len,0},{reductions,40320},{trap_exit,true}]},{<10870.32.0>,[{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<404 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,4}]},{heap_size,2584},{total_heap_size,20295},{links,[<10870.47.0>,<10870.48.0>,<10870.31.0>]},{memory,81656},{message_queue_len,0},{reductions,5170},{trap_exit,true}]},{<10870.33.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<385 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.11.0>]},{memory,1368},{message_queue_len,0},{reductions,268},{trap_exit,true}]},{<10870.34.0>,[{registered_name,kernel_safe_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1044 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,610},{total_heap_size,987},{links,[<10870.130.0>,<10870.133.0>,<10870.134.0>,<10870.57.0>,<10870.129.0>,<10870.11.0>]},{memory,4484},{message_queue_len,0},{reductions,388},{trap_exit,true}]},{<10870.39.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<700 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.7.0>,<10870.40.0>]},{memory,1388},{message_queue_len,0},{reductions,23},{trap_exit,true}]},{<10870.40.0>,[{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<246 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.39.0>,<10870.41.0>]},{memory,1328},{message_queue_len,0},{reductions,70},{trap_exit,true}]},{<10870.41.0>,[{registered_name,sasl_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<774 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,233},{total_heap_size,610},{links,[<10870.42.0>,<10870.45.0>,<10870.40.0>]},{memory,2916},{message_queue_len,0},{reductions,158},{trap_exit,true}]},{<10870.42.0>,[{registered_name,sasl_safe_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<748 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,233},{total_heap_size,610},{links,[<10870.43.0>,<10870.44.0>,<10870.41.0>]},{memory,2916},{message_queue_len,0},{reductions,174},{trap_exit,true}]},{<10870.43.0>,[{registered_name,alarm_handler},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<405 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.42.0>]},{memory,1368},{message_queue_len,0},{reductions,28},{trap_exit,true}]},{<10870.44.0>,[{registered_name,overload},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<433 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.42.0>]},{memory,1368},{message_queue_len,0},{reductions,39},{trap_exit,false}]},{<10870.45.0>,[{registered_name,release_handler},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<645 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,5}]},{heap_size,610},{total_heap_size,987},{links,[<10870.41.0>]},{memory,4384},{message_queue_len,0},{reductions,1249},{trap_exit,false}]},{<10870.47.0>,[{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<223 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.32.0>]},{memory,1308},{message_queue_len,0},{reductions,8},{trap_exit,false}]},{<10870.48.0>,[{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<351 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.32.0>]},{memory,1348},{message_queue_len,0},{reductions,14},{trap_exit,false}]},{<10870.50.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<568 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.7.0>,<10870.51.0>]},{memory,1388},{message_queue_len,0},{reductions,23},{trap_exit,true}]},{<10870.51.0>,[{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<229 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.50.0>,<10870.52.0>]},{memory,1328},{message_queue_len,0},{reductions,40},{trap_exit,true}]},{<10870.52.0>,[{registered_name,os_mon_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<828 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,754},{links,[<10870.53.0>,<10870.54.0>,<10870.55.0>,<10870.51.0>]},{memory,3512},{message_queue_len,0},{reductions,274},{trap_exit,true}]},{<10870.53.0>,[{registered_name,os_mon_sysinfo},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<411 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,53}]},{heap_size,1597},{total_heap_size,1974},{links,[<10870.52.0>,#Port<10870.1438>]},{memory,8352},{message_queue_len,0},{reductions,4846},{trap_exit,true}]},{<10870.54.0>,[{registered_name,disksup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<473 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,103}]},{heap_size,233},{total_heap_size,1830},{links,[<10870.52.0>]},{memory,7756},{message_queue_len,0},{reductions,39319},{trap_exit,true}]},{<10870.55.0>,[{registered_name,memsup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<525 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,87}]},{heap_size,377},{total_heap_size,987},{links,[<10870.52.0>]},{memory,4384},{message_queue_len,0},{reductions,88068},{trap_exit,true}]},{<10870.57.0>,[{registered_name,timer_server},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<376 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,586}]},{heap_size,2584},{total_heap_size,2961},{links,[<10870.199.0>,<10870.249.0>,<10870.342.0>,<10870.507.0>,<10870.269.0>,<10870.200.0>,<10870.245.0>,<10870.172.0>,<10870.179.0>,<10870.34.0>]},{memory,12460},{message_queue_len,0},{reductions,145568},{trap_exit,true}]},{<10870.59.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<739 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<10870.7.0>,<10870.60.0>]},{memory,1964},{message_queue_len,0},{reductions,46},{trap_exit,true}]},{<10870.60.0>,[{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<232 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.59.0>,<10870.61.0>]},{memory,1328},{message_queue_len,0},{reductions,50},{trap_exit,true}]},{<10870.61.0>,[{registered_name,ns_server_cluster_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1185 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,377},{total_heap_size,987},{links,[<10870.62.0>,<10870.64.0>,<10870.171.0>,<10870.63.0>,<10870.60.0>]},{memory,4464},{message_queue_len,0},{reductions,2794},{trap_exit,true}]},{<10870.62.0>,[{registered_name,dist_manager},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<411 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.61.0>]},{memory,1368},{message_queue_len,0},{reductions,132},{trap_exit,false}]},{<10870.63.0>,[{registered_name,ns_cluster},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<389 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,10}]},{heap_size,10946},{total_heap_size,15127},{links,[<10870.61.0>]},{memory,60944},{message_queue_len,0},{reductions,13559},{trap_exit,false}]},{<10870.64.0>,[{registered_name,ns_config_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1004 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,6765},{total_heap_size,7142},{links,[<10870.65.0>,<10870.66.0>,<10870.61.0>]},{memory,29044},{message_queue_len,0},{reductions,1037},{trap_exit,true}]},{<10870.65.0>,[{registered_name,ns_config_events},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1347 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,835},{total_heap_size,835},{links,[<10870.230.0>,<10870.267.0>,<10870.308.0>,<10870.64.0>]},{memory,3876},{message_queue_len,0},{reductions,430155},{trap_exit,true}]},{<10870.66.0>,[{registered_name,ns_config},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<44579 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,15}]},{heap_size,75025},{total_heap_size,121393},{links,[<10870.64.0>]},{memory,486008},{message_queue_len,0},{reductions,355416},{trap_exit,false}]},{<10870.129.0>,[{registered_name,disk_log_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<691 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,21}]},{heap_size,377},{total_heap_size,987},{links,[<10870.253.0>,<10870.34.0>]},{memory,4404},{message_queue_len,0},{reductions,5272},{trap_exit,true}]},{<10870.130.0>,[{registered_name,disk_log_server},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<402 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,25}]},{heap_size,1597},{total_heap_size,2584},{links,[<10870.253.0>,<10870.34.0>]},{memory,10792},{message_queue_len,0},{reductions,6194},{trap_exit,true}]},{<10870.133.0>,[{registered_name,dets_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<647 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,7}]},{heap_size,377},{total_heap_size,987},{links,[<10870.34.0>]},{memory,4384},{message_queue_len,0},{reductions,720},{trap_exit,true}]},{<10870.134.0>,[{registered_name,dets},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<404 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,10}]},{heap_size,610},{total_heap_size,1597},{links,[<10870.34.0>]},{memory,6824},{message_queue_len,0},{reductions,1171},{trap_exit,true}]},{<10870.171.0>,[{registered_name,ns_server_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<2546 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,46368},{total_heap_size,121393},{links,[<10870.216.0>,<10870.229.0>,<10870.266.0>,<10870.308.0>,<10870.309.0>,<10870.267.0>,<10870.233.0>,<10870.234.0>,<10870.230.0>,<10870.227.0>,<10870.228.0>,<10870.221.0>,<10870.174.0>,<10870.199.0>,<10870.200.0>,<10870.177.0>,<10870.172.0>,<10870.173.0>,<10870.61.0>]},{memory,486368},{message_queue_len,0},{reductions,55933},{trap_exit,true}]},{<10870.172.0>,[{registered_name,ns_log},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<5914 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,5}]},{heap_size,6765},{total_heap_size,13530},{links,[<10870.57.0>,<10870.171.0>]},{memory,54576},{message_queue_len,0},{reductions,6688},{trap_exit,false}]},{<10870.173.0>,[{registered_name,ns_log_events},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<223 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,8}]},{heap_size,34},{total_heap_size,34},{links,[<10870.171.0>]},{memory,572},{message_queue_len,0},{reductions,5131},{trap_exit,true}]},{<10870.174.0>,[{registered_name,ns_mail_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<746 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,233},{total_heap_size,610},{links,[<10870.175.0>,<10870.171.0>]},{memory,2896},{message_queue_len,0},{reductions,664},{trap_exit,true}]},{<10870.175.0>,[{registered_name,ns_mail},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<388 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.174.0>]},{memory,1368},{message_queue_len,0},{reductions,27},{trap_exit,true}]},{<10870.177.0>,[{registered_name,ns_node_disco_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1094 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,4}]},{heap_size,233},{total_heap_size,610},{links,[<10870.179.0>,<10870.191.0>,<10870.178.0>,<10870.171.0>]},{memory,2936},{message_queue_len,0},{reductions,805},{trap_exit,true}]},{<10870.178.0>,[{registered_name,ns_node_disco_events},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<413 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,66},{total_heap_size,66},{links,[<10870.177.0>]},{memory,740},{message_queue_len,0},{reductions,111},{trap_exit,true}]},{<10870.179.0>,[{registered_name,ns_node_disco},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<479 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,184}]},{heap_size,46368},{total_heap_size,53133},{links,[<10870.177.0>,<10870.57.0>]},{memory,212988},{message_queue_len,0},{reductions,243491},{trap_exit,false}]},{<10870.182.0>,[{registered_name,[]},{status,waiting},{initial_call,{inet_tcp_dist,do_setup,6}},{backtrace,<<452 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,16}]},{heap_size,2584},{total_heap_size,2961},{links,[<10870.21.0>,#Port<10870.3247>]},{memory,12240},{message_queue_len,0},{reductions,4087},{trap_exit,false}]},{<10870.186.0>,[{registered_name,[]},{status,waiting},{initial_call,{inet_tcp_dist,do_accept,6}},{backtrace,<<452 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,15}]},{heap_size,233},{total_heap_size,610},{links,[<10870.21.0>,#Port<10870.3249>]},{memory,2836},{message_queue_len,0},{reductions,3720},{trap_exit,false}]},{<10870.191.0>,[{registered_name,ns_config_rep},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<396 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,26}]},{heap_size,46368},{total_heap_size,75025},{links,[<10870.177.0>]},{memory,300536},{message_queue_len,0},{reductions,42693},{trap_exit,false}]},{<10870.199.0>,[{registered_name,ns_heart},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<748 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,314}]},{heap_size,6765},{total_heap_size,53133},{links,[<10870.171.0>,<10870.57.0>]},{memory,212988},{message_queue_len,0},{reductions,3957929},{trap_exit,false}]},{<10870.200.0>,[{registered_name,ns_doctor},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<3450 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,654}]},{heap_size,6765},{total_heap_size,9349},{links,[<10870.171.0>,<10870.57.0>]},{memory,37852},{message_queue_len,0},{reductions,279031},{trap_exit,false}]},{<10870.216.0>,[{registered_name,menelaus_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<916 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,4181},{total_heap_size,32838},{links,[<10870.217.0>,<10870.507.0>,<10870.171.0>]},{memory,131828},{message_queue_len,0},{reductions,4564},{trap_exit,true}]},{<10870.217.0>,[{registered_name,menelaus_web},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<522 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,377},{total_heap_size,987},{links,[<10870.216.0>,<10870.218.0>,<10870.281.0>,#Port<10870.3254>]},{memory,4444},{message_queue_len,0},{reductions,335},{trap_exit,true}]},{<10870.218.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<16473 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,29}]},{heap_size,75025},{total_heap_size,150050},{links,[<10870.217.0>,#Port<10870.3380>]},{memory,600736},{message_queue_len,0},{reductions,9767093},{trap_exit,false}]},{<10870.221.0>,[{registered_name,ns_port_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<2878 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,46368},{total_heap_size,75025},{links,[<10870.223.0>,<10870.225.0>,<10870.171.0>]},{memory,300576},{message_queue_len,0},{reductions,7425},{trap_exit,true}]},{<10870.223.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<437 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,1597},{total_heap_size,1597},{links,[<10870.221.0>,<10870.224.0>]},{memory,6844},{message_queue_len,0},{reductions,149},{trap_exit,true}]},{<10870.224.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<588 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,2584},{total_heap_size,5168},{links,[<10870.223.0>,#Port<10870.3255>]},{memory,21128},{message_queue_len,0},{reductions,305},{trap_exit,true}]},{<10870.225.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<442 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,987},{total_heap_size,987},{links,[<10870.221.0>,<10870.226.0>]},{memory,4404},{message_queue_len,0},{reductions,55},{trap_exit,true}]},{<10870.226.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<555 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,31}]},{heap_size,2584},{total_heap_size,4181},{links,[<10870.225.0>,#Port<10870.3256>]},{memory,17180},{message_queue_len,0},{reductions,18990},{trap_exit,true}]},{<10870.227.0>,[{registered_name,ns_tick_event},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<457 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,117}]},{heap_size,610},{total_heap_size,987},{links,[<10870.171.0>,<10870.268.0>]},{memory,4404},{message_queue_len,0},{reductions,24441},{trap_exit,true}]},{<10870.228.0>,[{registered_name,ns_stats_event},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<458 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,704}]},{heap_size,1597},{total_heap_size,2584},{links,[<10870.171.0>,<10870.269.0>]},{memory,10792},{message_queue_len,0},{reductions,64076},{trap_exit,true}]},{<10870.229.0>,[{registered_name,ns_good_bucket_worker},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<396 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,233},{total_heap_size,610},{links,[<10870.171.0>]},{memory,2876},{message_queue_len,0},{reductions,167},{trap_exit,false}]},{<10870.230.0>,[{registered_name,ns_good_bucket_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<914 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,17711},{total_heap_size,17711},{links,[<10870.171.0>,<10870.231.0>,<10870.342.0>,<10870.65.0>]},{memory,71340},{message_queue_len,0},{reductions,1557},{trap_exit,true}]},{<10870.231.0>,[{registered_name,'ns_vbm_sup-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1941 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,16}]},{heap_size,987},{total_heap_size,7752},{links,[<10870.230.0>,<10870.2984.0>]},{memory,31464},{message_queue_len,0},{reductions,35169},{trap_exit,true}]},{<10870.233.0>,[{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<342 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.171.0>]},{memory,1348},{message_queue_len,0},{reductions,6},{trap_exit,false}]},{<10870.234.0>,[{registered_name,ns_mnesia},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<388 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,1597},{total_heap_size,2584},{links,[<10870.243.0>,<10870.171.0>]},{memory,10792},{message_queue_len,0},{reductions,1477},{trap_exit,true}]},{<10870.235.0>,[{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<230 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.110.0>]},{memory,1364},{message_queue_len,0},{reductions,13},{trap_exit,true}]},{<10870.237.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1045 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,987},{total_heap_size,1597},{links,[<10870.7.0>,<10870.238.0>]},{memory,6844},{message_queue_len,0},{reductions,81},{trap_exit,true}]},{<10870.238.0>,[{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<244 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.237.0>,<10870.239.0>]},{memory,1328},{message_queue_len,0},{reductions,32},{trap_exit,true}]},{<10870.239.0>,[{registered_name,mnesia_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<807 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,754},{links,[<10870.240.0>,<10870.241.0>,<10870.238.0>]},{memory,3492},{message_queue_len,0},{reductions,198},{trap_exit,true}]},{<10870.240.0>,[{registered_name,mnesia_event},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<421 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,987},{total_heap_size,1597},{links,[<10870.239.0>,<10870.243.0>]},{memory,6844},{message_queue_len,0},{reductions,398},{trap_exit,true}]},{<10870.241.0>,[{registered_name,mnesia_kernel_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1660 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,7}]},{heap_size,377},{total_heap_size,754},{links,[<10870.244.0>,<10870.248.0>,<10870.249.0>,<10870.250.0>,<10870.246.0>,<10870.247.0>,<10870.245.0>,<10870.242.0>,<10870.243.0>,<10870.239.0>]},{memory,3632},{message_queue_len,0},{reductions,551},{trap_exit,true}]},{<10870.242.0>,[{registered_name,mnesia_monitor},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<435 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,35}]},{heap_size,233},{total_heap_size,843},{links,[<10870.253.0>,<10870.241.0>]},{memory,3828},{message_queue_len,0},{reductions,7448},{trap_exit,true}]},{<10870.243.0>,[{registered_name,mnesia_subscr},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<412 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.240.0>,<10870.241.0>,<10870.234.0>]},{memory,1408},{message_queue_len,0},{reductions,111},{trap_exit,true}]},{<10870.244.0>,[{registered_name,mnesia_locker},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<562 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,749}]},{heap_size,1597},{total_heap_size,1974},{links,[<10870.241.0>]},{memory,8332},{message_queue_len,0},{reductions,324435},{trap_exit,true}]},{<10870.245.0>,[{registered_name,mnesia_recover},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<448 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,7}]},{heap_size,2584},{total_heap_size,2961},{links,[<10870.241.0>,<10870.57.0>]},{memory,12300},{message_queue_len,0},{reductions,4308},{trap_exit,true}]},{<10870.246.0>,[{registered_name,mnesia_tm},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<626 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,679}]},{heap_size,610},{total_heap_size,987},{links,[<10870.241.0>]},{memory,4384},{message_queue_len,0},{reductions,742548},{trap_exit,true}]},{<10870.247.0>,[{registered_name,mnesia_checkpoint_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<760 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.241.0>]},{memory,1368},{message_queue_len,0},{reductions,61},{trap_exit,true}]},{<10870.248.0>,[{registered_name,mnesia_snmp_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<728 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.241.0>]},{memory,1368},{message_queue_len,0},{reductions,61},{trap_exit,true}]},{<10870.249.0>,[{registered_name,mnesia_controller},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<497 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,5}]},{heap_size,610},{total_heap_size,987},{links,[<10870.241.0>,<10870.57.0>]},{memory,4404},{message_queue_len,0},{reductions,867},{trap_exit,true}]},{<10870.250.0>,[{registered_name,mnesia_late_loader},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<536 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<10870.241.0>]},{memory,1944},{message_queue_len,0},{reductions,178},{trap_exit,false}]},{<10870.253.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<634 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,1597},{total_heap_size,1597},{links,[<10870.130.0>,<10870.242.0>,<10870.129.0>,#Port<10870.3945>]},{memory,6884},{message_queue_len,0},{reductions,234956},{trap_exit,true}]},{<10870.266.0>,[{registered_name,ns_bad_bucket_worker},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<395 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,987},{total_heap_size,1597},{links,[<10870.171.0>]},{memory,6824},{message_queue_len,0},{reductions,221},{trap_exit,false}]},{<10870.267.0>,[{registered_name,ns_bad_bucket_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1019 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,17711},{total_heap_size,17711},{links,[<10870.171.0>,<10870.269.0>,<10870.307.0>,<10870.268.0>,<10870.65.0>]},{memory,71360},{message_queue_len,0},{reductions,1595},{trap_exit,true}]},{<10870.268.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<515 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,6765},{total_heap_size,17711},{links,[<10870.267.0>,<10870.227.0>]},{memory,71300},{message_queue_len,0},{reductions,15252247},{trap_exit,false}]},{<10870.269.0>,[{registered_name,'stats_archiver-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<418 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,58}]},{heap_size,4181},{total_heap_size,21892},{links,[<10870.228.0>,<10870.267.0>,<10870.57.0>]},{memory,88044},{message_queue_len,0},{reductions,11283941},{trap_exit,false}]},{<10870.281.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<606 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<10870.217.0>]},{memory,1984},{message_queue_len,0},{reductions,18},{trap_exit,false}]},{<10870.307.0>,[{registered_name,'stats_reader-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<414 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,31}]},{heap_size,514229},{total_heap_size,1028458},{links,[<10870.267.0>]},{memory,4114268},{message_queue_len,0},{reductions,2728740},{trap_exit,false}]},{<10870.308.0>,[{registered_name,ns_moxi_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<575 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,17711},{total_heap_size,17711},{links,[<10870.171.0>,<10870.65.0>]},{memory,71300},{message_queue_len,0},{reductions,2139},{trap_exit,true}]},{<10870.309.0>,[{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<334 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10870.171.0>]},{memory,1348},{message_queue_len,0},{reductions,6},{trap_exit,false}]},{<10870.310.0>,[{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<230 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.152.0>]},{memory,1364},{message_queue_len,0},{reductions,13},{trap_exit,true}]},{<10870.342.0>,[{registered_name,'ns_memcached-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<415 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,25}]},{heap_size,6765},{total_heap_size,13530},{links,[#Port<10870.3448>,#Port<10870.3457>,#Port<10870.3462>,<10870.230.0>,<10870.57.0>,#Port<10870.3460>,#Port<10870.3451>,#Port<10870.3453>,#Port<10870.3450>,#Port<10870.3438>,#Port<10870.3445>,#Port<10870.3447>,#Port<10870.3442>,#Port<10870.3433>,#Port<10870.3436>,#Port<10870.3430>]},{memory,54856},{message_queue_len,0},{reductions,67434655},{trap_exit,true}]},{<10870.507.0>,[{registered_name,hot_keys_keeper},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<5217 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,60}]},{heap_size,17711},{total_heap_size,46368},{links,[<10870.216.0>,<10870.57.0>]},{memory,185928},{message_queue_len,0},{reductions,125302},{trap_exit,false}]},{<10870.2984.0>,[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<584 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,987},{total_heap_size,1974},{links,[<10870.231.0>,#Port<10870.3835>]},{memory,8352},{message_queue_len,0},{reductions,338},{trap_exit,true}]},{<10870.3767.0>,[{registered_name,[]},{status,running},{initial_call,{erlang,apply,2}},{backtrace,<<96008 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,28657},{total_heap_size,28657},{links,[]},{memory,115024},{message_queue_len,0},{reductions,18419},{trap_exit,false}]}]},{memory,{4284698624,3351322624,{<10870.307.0>,4114268}}},{disk,[{\"C:\\\",49423972,41},{\"D:\\\",52797620,0},{\"G:\\\",34724465,17}]}]}]\ny(3) []\ny(4) rex\ny(5) #Ref<0.0.0.246339>\ny(6) [{'ns_1@10.2.1.100',#Ref<0.0.0.246342>}]\ny(7) #Ref<0.0.0.246343>\ny(8) 'ns_1@10.2.1.101'\n\n0x05b27964 Return addr 0x00bdc194 ()\ny(0) #Ref<0.0.0.246339>\ny(1) []\ny(2) []\ny(3) []\ny(4) []\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,28657}, {total_heap_size,28657}, {links,[]}, {memory,115144}, 
{message_queue_len,0}, {reductions,2466}, {trap_exit,true}]}, {<0.12007.0>, [{registered_name,[]}, {status,running}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x00e22cd0 (unknown function)\nCP: 0x04c0a96c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 56)\n\n0x0501b6b8 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) []\ny(1) <0.12007.0>\n\n0x0501b6c4 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<86148 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,28657},{total_heap_size,28657},{links,[]},{memory,115144},{message_queue_len,0},{reductions,2466},{trap_exit,true}]\ny(1) <0.12006.0>\n\n0x0501b6d0 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<606 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<0.96.0>]},{memory,1984},{message_queue_len,0},{reductions,18},{trap_exit,false}]\ny(1) <0.12004.0>\n\n0x0501b6dc Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<6644 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,12}]},{heap_size,17711},{total_heap_size,46368},{links,[<0.110.0>]},{memory,185948},{message_queue_len,0},{reductions,2374556},{trap_exit,false}]\ny(1) <0.11992.0>\n\n0x0501b6e8 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<55717 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,28657},{total_heap_size,75025},{links,[<0.96.0>,#Port<0.6855>]},{memory,300636},{message_queue_len,0},{reductions,44256},{trap_exit,false}]\ny(1) <0.11987.0>\n\n0x0501b6f4 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<585 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,6765},{total_heap_size,6765},{links,[<0.260.0>,#Port<0.6779>]},{memory,27516},{message_queue_len,0},{reductions,243},{trap_exit,true}]\ny(1) <0.11394.0>\n\n0x0501b700 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<450 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,28657},{total_heap_size,103682},{links,[<0.96.0>,#Port<0.6771>]},{memory,415184},{message_queue_len,0},{reductions,699866},{trap_exit,false}]\ny(1) <0.11252.0>\n\n0x0501b70c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<4905 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,15}]},{heap_size,46368},{total_heap_size,121393},{links,[<0.96.0>,#Port<0.6735>]},{memory,486108},{message_queue_len,0},{reductions,971264},{trap_exit,false}]\ny(1) 
<0.10944.0>\n\n0x0501b718 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<3850 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,46368},{total_heap_size,50549},{links,[<0.96.0>,#Port<0.6713>]},{memory,202732},{message_queue_len,0},{reductions,7780421},{trap_exit,false}]\ny(1) <0.10782.0>\n\n0x0501b724 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{inet_tcp_dist,do_accept,6}},{backtrace,<<452 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,14}]},{heap_size,233},{total_heap_size,610},{links,[<0.21.0>,#Port<0.5832>]},{memory,2836},{message_queue_len,0},{reductions,3719},{trap_exit,false}]\ny(1) <0.5157.0>\n\n0x0501b730 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,hot_keys_keeper},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<5226 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,99}]},{heap_size,46368},{total_heap_size,75025},{links,[<0.95.0>,<0.57.0>]},{memory,300556},{message_queue_len,0},{reductions,186062},{trap_exit,false}]\ny(1) <0.609.0>\n\n0x0501b73c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{inet_tcp_dist,do_accept,6}},{backtrace,<<453 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,44}]},{heap_size,233},{total_heap_size,610},{links,[<0.21.0>,#Port<0.4307>]},{memory,2836},{message_queue_len,0},{reductions,5145},{trap_exit,false}]\ny(1) <0.449.0>\n\n0x0501b748 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,tftp_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<570 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.429.0>]},{memory,1368},{message_queue_len,0},{reductions,46},{trap_exit,true}]\ny(1) <0.436.0>\n\n0x0501b754 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,httpd_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<573 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.429.0>]},{memory,1368},{message_queue_len,0},{reductions,45},{trap_exit,true}]\ny(1) <0.435.0>\n\n0x0501b760 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,httpc_handler_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<696 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,6765},{total_heap_size,10946},{links,[<0.431.0>]},{memory,44220},{message_queue_len,0},{reductions,626},{trap_exit,true}]\ny(1) <0.434.0>\n\n0x0501b76c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,httpc_manager},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<540 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,10946},{total_heap_size,15127},{links,[<0.432.0>]},{memory,60944},{message_queue_len,0},{reductions,1086},{trap_exit,true}]\ny(1) <0.433.0>\n\n0x0501b778 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,httpc_profile_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<764 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.431.0>,<0.433.0>]},{memory,1388},{message_queue_len,0},{reductions,124},{trap_exit,true}]\ny(1) <0.432.0>\n\n0x0501b784 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,httpc_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<890 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,233},{total_heap_size,610},{links,[<0.432.0>,<0.434.0>,<0.429.0>]},{memory,2916},{message_queue_len,0},{reductions,175},{trap_exit,true}]\ny(1) <0.431.0>\n\n0x0501b790 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ftp_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<646 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.429.0>]},{memory,1368},{message_queue_len,0},{reductions,54},{trap_exit,true}]\ny(1) <0.430.0>\n\n0x0501b79c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,inets_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<989 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,4}]},{heap_size,377},{total_heap_size,754},{links,[<0.430.0>,<0.435.0>,<0.436.0>,<0.431.0>,<0.428.0>]},{memory,3532},{message_queue_len,0},{reductions,320},{trap_exit,true}]\ny(1) <0.429.0>\n\n0x0501b7a8 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<234 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.427.0>,<0.429.0>]},{memory,1328},{message_queue_len,0},{reductions,42},{trap_exit,true}]\ny(1) <0.428.0>\n\n0x0501b7b4 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1330 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<0.7.0>,<0.428.0>]},{memory,1964},{message_queue_len,0},{reductions,42},{trap_exit,true}]\ny(1) <0.427.0>\n\n0x0501b7c0 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,'ns_memcached-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<415 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,126}]},{heap_size,10946},{total_heap_size,85971},{links,[#Port<0.4329>,#Port<0.4363>,<0.57.0>,<0.109.0>,#Port<0.4370>,#Port<0.4348>,#Port<0.4356>,#Port<0.4340>,#Port<0.3448>,#Port<0.3461>,#Port<0.4315>,#Port<0.3454>,#Port<0.3433>,#Port<0.3441>,#Port<0.3429>]},{memory,344600},{message_queue_len,0},{reductions,105919253},{trap_exit,true}]\ny(1) <0.322.0>\n\n0x0501b7cc Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<588 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,2584},{total_heap_size,5168},{links,[<0.300.0>,#Port<0.3417>]},{memory,21128},{message_queue_len,0},{reductions,305},{trap_exit,true}]\ny(1) <0.301.0>\n\n0x0501b7d8 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<437 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,1597},{total_heap_size,1597},{links,[<0.100.0>,<0.301.0>]},{memory,6844},{message_queue_len,0},{reductions,149},{trap_exit,true}]\ny(1) <0.300.0>\n\n0x0501b7e4 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,'stats_reader-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<414 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,108}]},{heap_size,317811},{total_heap_size,1664080},{links,[<0.150.0>]},{memory,6656756},{message_queue_len,0},{reductions,8089605},{trap_exit,false}]\ny(1) <0.299.0>\n\n0x0501b7f0 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,'stats_archiver-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<418 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,50}]},{heap_size,4181},{total_heap_size,15127},{links,[<0.107.0>,<0.150.0>,<0.57.0>]},{memory,60984},{message_queue_len,0},{reductions,16813545},{trap_exit,false}]\ny(1) <0.262.0>\n\n0x0501b7fc Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,'ns_vbm_sup-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<6060 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,28657},{total_heap_size,57314},{links,[<0.109.0>,<0.11394.0>]},{memory,229712},{message_queue_len,0},{reductions,7803},{trap_exit,true}]\ny(1) <0.260.0>\n\n0x0501b808 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<529 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,18}]},{heap_size,6765},{total_heap_size,17711},{links,[<0.150.0>,<0.106.0>]},{memory,71300},{message_queue_len,0},{reductions,23051866},{trap_exit,false}]\ny(1) <0.259.0>\n\n0x0501b814 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<16473 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,120}]},{heap_size,121393},{total_heap_size,317811},{links,[<0.96.0>,#Port<0.3418>]},{memory,1271780},{message_queue_len,0},{reductions,13817997},{trap_exit,false}]\ny(1) <0.228.0>\n\n0x0501b820 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<397 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,214}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.71.0>,<11993.390.0>,<10870.310.0>,<0.57.0>]},{memory,12540},{message_queue_len,0},{reductions,141677},{trap_exit,false}]\ny(1) <0.152.0>\n\n0x0501b82c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_moxi_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<574 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,1597},{total_heap_size,12543},{links,[<0.71.0>,<0.65.0>]},{memory,50628},{message_queue_len,0},{reductions,1910},{trap_exit,true}]\ny(1) <0.151.0>\n\n0x0501b838 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_bad_bucket_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1018 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,6765},{total_heap_size,6765},{links,[<0.71.0>,<0.262.0>,<0.299.0>,<0.259.0>,<0.65.0>]},{memory,27576},{message_queue_len,0},{reductions,888},{trap_exit,true}]\ny(1) <0.150.0>\n\n0x0501b844 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_bad_bucket_worker},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<394 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,987},{total_heap_size,1364},{links,[<0.71.0>]},{memory,5892},{message_queue_len,0},{reductions,421},{trap_exit,false}]\ny(1) <0.149.0>\n\n0x0501b850 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,dets},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<404 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,8}]},{heap_size,610},{total_heap_size,1597},{links,[<0.34.0>]},{memory,6824},{message_queue_len,0},{reductions,965},{trap_exit,true}]\ny(1) <0.135.0>\n\n0x0501b85c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,dets_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<647 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,5}]},{heap_size,377},{total_heap_size,987},{links,[<0.34.0>]},{memory,4384},{message_queue_len,0},{reductions,596},{trap_exit,true}]\ny(1) <0.134.0>\n\n0x0501b868 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<635 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,16}]},{heap_size,1597},{total_heap_size,4181},{links,[<0.129.0>,<0.130.0>,<0.118.0>,#Port<0.6833>]},{memory,17220},{message_queue_len,0},{reductions,346982},{trap_exit,true}]\ny(1) <0.131.0>\n\n0x0501b874 Return addr 0x04c0a98c 
(diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,disk_log_server},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<402 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,33}]},{heap_size,2584},{total_heap_size,3571},{links,[<0.131.0>,<0.34.0>]},{memory,14740},{message_queue_len,0},{reductions,8451},{trap_exit,true}]\ny(1) <0.130.0>\n\n0x0501b880 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,disk_log_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<691 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,25}]},{heap_size,377},{total_heap_size,987},{links,[<0.131.0>,<0.34.0>]},{memory,4404},{message_queue_len,0},{reductions,7283},{trap_exit,true}]\ny(1) <0.129.0>\n\n0x0501b88c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_late_loader},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<536 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<0.117.0>]},{memory,1944},{message_queue_len,0},{reductions,178},{trap_exit,false}]\ny(1) <0.126.0>\n\n0x0501b898 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_controller},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<496 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,4}]},{heap_size,987},{total_heap_size,1364},{links,[<0.117.0>,<0.57.0>]},{memory,5912},{message_queue_len,0},{reductions,1087},{trap_exit,true}]\ny(1) <0.125.0>\n\n0x0501b8a4 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_snmp_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<728 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.117.0>]},{memory,1368},{message_queue_len,0},{reductions,61},{trap_exit,true}]\ny(1) <0.124.0>\n\n0x0501b8b0 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_checkpoint_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<760 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.117.0>]},{memory,1368},{message_queue_len,0},{reductions,61},{trap_exit,true}]\ny(1) <0.123.0>\n\n0x0501b8bc Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_tm},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<626 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,598}]},{heap_size,987},{total_heap_size,1597},{links,[<0.117.0>]},{memory,6824},{message_queue_len,0},{reductions,1091793},{trap_exit,true}]\ny(1) <0.122.0>\n\n0x0501b8c8 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_recover},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<448 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,12}]},{heap_size,987},{total_heap_size,1364},{links,[<0.117.0>,<0.57.0>]},{memory,5912},{message_queue_len,0},{reductions,6325},{trap_exit,true}]\ny(1) <0.121.0>\n\n0x0501b8d4 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_locker},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<562 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,956}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.117.0>]},{memory,12280},{message_queue_len,0},{reductions,479677},{trap_exit,true}]\ny(1) <0.120.0>\n\n0x0501b8e0 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_subscr},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<412 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.116.0>,<0.117.0>,<0.111.0>]},{memory,1408},{message_queue_len,0},{reductions,111},{trap_exit,true}]\ny(1) <0.119.0>\n\n0x0501b8ec Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_monitor},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<435 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,46}]},{heap_size,987},{total_heap_size,1597},{links,[<0.131.0>,<0.117.0>]},{memory,6844},{message_queue_len,0},{reductions,9624},{trap_exit,true}]\ny(1) <0.118.0>\n\n0x0501b8f8 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_kernel_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1660 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,8}]},{heap_size,610},{total_heap_size,987},{links,[<0.120.0>,<0.124.0>,<0.125.0>,<0.126.0>,<0.122.0>,<0.123.0>,<0.121.0>,<0.118.0>,<0.119.0>,<0.115.0>]},{memory,4564},{message_queue_len,0},{reductions,599},{trap_exit,true}]\ny(1) <0.117.0>\n\n0x0501b904 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_event},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<421 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,6}]},{heap_size,610},{total_heap_size,987},{links,[<0.115.0>,<0.119.0>]},{memory,4404},{message_queue_len,0},{reductions,454},{trap_exit,true}]\ny(1) <0.116.0>\n\n0x0501b910 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<807 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,754},{links,[<0.116.0>,<0.117.0>,<0.114.0>]},{memory,3492},{message_queue_len,0},{reductions,207},{trap_exit,true}]\ny(1) <0.115.0>\n\n0x0501b91c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<244 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.113.0>,<0.115.0>]},{memory,1328},{message_queue_len,0},{reductions,40},{trap_exit,true}]\ny(1) 
<0.114.0>\n\n0x0501b928 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1045 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,987},{total_heap_size,1597},{links,[<0.7.0>,<0.114.0>]},{memory,6844},{message_queue_len,0},{reductions,82},{trap_exit,true}]\ny(1) <0.113.0>\n\n0x0501b934 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_mnesia},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<387 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,1597},{total_heap_size,4181},{links,[<0.119.0>,<0.71.0>]},{memory,17180},{message_queue_len,0},{reductions,1639},{trap_exit,true}]\ny(1) <0.111.0>\n\n0x0501b940 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<453 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,338}]},{heap_size,4181},{total_heap_size,15127},{links,[<0.71.0>,<11993.315.0>,<0.11992.0>,<10870.235.0>,<0.57.0>]},{memory,61224},{message_queue_len,0},{reductions,1179367},{trap_exit,true}]\ny(1) <0.110.0>\n\n0x0501b94c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_good_bucket_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<913 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,6765},{total_heap_size,6765},{links,[<0.71.0>,<0.260.0>,<0.322.0>,<0.65.0>]},{memory,27556},{message_queue_len,0},{reductions,847},{trap_exit,true}]\ny(1) <0.109.0>\n\n0x0501b958 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_good_bucket_worker},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<395 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,987},{total_heap_size,1597},{links,[<0.71.0>]},{memory,6824},{message_queue_len,0},{reductions,381},{trap_exit,false}]\ny(1) <0.108.0>\n\n0x0501b964 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_stats_event},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<457 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1195}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.262.0>,<0.71.0>]},{memory,12300},{message_queue_len,0},{reductions,140475},{trap_exit,true}]\ny(1) <0.107.0>\n\n0x0501b970 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_tick_event},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<456 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,172}]},{heap_size,610},{total_heap_size,987},{links,[<0.71.0>,<0.259.0>]},{memory,4404},{message_queue_len,0},{reductions,36107},{trap_exit,true}]\ny(1) <0.106.0>\n\n0x0501b97c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<634 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,69}]},{heap_size,6765},{total_heap_size,17711},{links,[<0.104.0>,#Port<0.1998>]},{memory,71300},{message_queue_len,0},{reductions,84187},{trap_exit,true}]\ny(1) <0.105.0>\n\n0x0501b988 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<442 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,987},{total_heap_size,987},{links,[<0.100.0>,<0.105.0>]},{memory,4404},{message_queue_len,0},{reductions,55},{trap_exit,true}]\ny(1) <0.104.0>\n\n0x0501b994 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_port_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<2877 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,28657},{total_heap_size,39603},{links,[<0.104.0>,<0.300.0>,<0.71.0>]},{memory,158888},{message_queue_len,0},{reductions,6655},{trap_exit,true}]\ny(1) <0.100.0>\n\n0x0501b9a0 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,menelaus_web},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<523 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,39}]},{heap_size,610},{total_heap_size,1220},{links,[<0.10944.0>,<0.11987.0>,<0.12004.0>,<0.11252.0>,<0.228.0>,<0.10782.0>,<0.95.0>,#Port<0.1928>]},{memory,5456},{message_queue_len,0},{reductions,10871},{trap_exit,true}]\ny(1) <0.96.0>\n\n0x0501b9ac Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,menelaus_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<914 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,6}]},{heap_size,2584},{total_heap_size,13530},{links,[<0.96.0>,<0.609.0>,<0.71.0>]},{memory,54596},{message_queue_len,0},{reductions,3885},{trap_exit,true}]\ny(1) <0.95.0>\n\n0x0501b9b8 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_doctor},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<3449 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1180}]},{heap_size,6765},{total_heap_size,10946},{links,[<0.71.0>,<0.57.0>]},{memory,44240},{message_queue_len,0},{reductions,817540},{trap_exit,false}]\ny(1) <0.93.0>\n\n0x0501b9c4 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_heart},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<749 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2644}]},{heap_size,6765},{total_heap_size,53133},{links,[<0.71.0>,<0.57.0>]},{memory,212988},{message_queue_len,0},{reductions,6050002},{trap_exit,false}]\ny(1) <0.90.0>\n\n0x0501b9d0 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_config_rep},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<395 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,40}]},{heap_size,46368},{total_heap_size,57314},{links,[<0.77.0>]},{memory,229692},{message_queue_len,0},{reductions,61054},{trap_exit,false}]\ny(1) 
<0.85.0>\n\n0x0501b9dc Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_node_disco},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<478 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3161}]},{heap_size,46368},{total_heap_size,57314},{links,[<0.77.0>,<0.57.0>]},{memory,229712},{message_queue_len,0},{reductions,4048977},{trap_exit,false}]\ny(1) <0.79.0>\n\n0x0501b9e8 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_node_disco_events},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<648 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,194}]},{heap_size,233},{total_heap_size,377},{links,[<0.77.0>]},{memory,2064},{message_queue_len,0},{reductions,50051},{trap_exit,true}]\ny(1) <0.78.0>\n\n0x0501b9f4 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_node_disco_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1090 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,377},{total_heap_size,754},{links,[<0.78.0>,<0.79.0>,<0.85.0>,<0.71.0>]},{memory,3512},{message_queue_len,0},{reductions,841},{trap_exit,true}]\ny(1) <0.77.0>\n\n0x0501ba00 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_mail},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<387 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.74.0>]},{memory,1368},{message_queue_len,0},{reductions,27},{trap_exit,true}]\ny(1) <0.75.0>\n\n0x0501ba0c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_mail_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<744 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,754},{links,[<0.75.0>,<0.71.0>]},{memory,3472},{message_queue_len,0},{reductions,680},{trap_exit,true}]\ny(1) <0.74.0>\n\n0x0501ba18 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_log_events},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<222 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,36}]},{heap_size,34},{total_heap_size,34},{links,[<0.71.0>]},{memory,572},{message_queue_len,0},{reductions,20327},{trap_exit,true}]\ny(1) <0.73.0>\n\n0x0501ba24 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_log},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<7361 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,11}]},{heap_size,6765},{total_heap_size,13530},{links,[<0.57.0>,<0.71.0>]},{memory,54576},{message_queue_len,0},{reductions,18180},{trap_exit,false}]\ny(1) <0.72.0>\n\n0x0501ba30 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_server_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<2539 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,46368},{total_heap_size,121393},{links,[<0.95.0>,<0.108.0>,<0.149.0>,<0.151.0>,<0.152.0>,<0.150.0>,<0.110.0>,<0.111.0>,<0.109.0>,<0.106.0>,<0.107.0>,<0.100.0>,<0.74.0>,<0.90.0>,<0.93.0>,<0.77.0>,<0.72.0>,<0.73.0>,<0.61.0>]},{memory,486368},{message_queue_len,0},{reductions,55965},{trap_exit,true}]\ny(1) <0.71.0>\n\n0x0501ba3c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_config},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<44579 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,8}]},{heap_size,46368},{total_heap_size,92736},{links,[<0.64.0>]},{memory,371380},{message_queue_len,0},{reductions,1056270},{trap_exit,false}]\ny(1) <0.66.0>\n\n0x0501ba48 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_config_events},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1286 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,66}]},{heap_size,610},{total_heap_size,1597},{links,[<0.109.0>,<0.150.0>,<0.151.0>,<0.64.0>]},{memory,7004},{message_queue_len,0},{reductions,935574},{trap_exit,true}]\ny(1) <0.65.0>\n\n0x0501ba54 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_config_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1004 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,6765},{total_heap_size,7142},{links,[<0.65.0>,<0.66.0>,<0.61.0>]},{memory,29044},{message_queue_len,0},{reductions,1037},{trap_exit,true}]\ny(1) <0.64.0>\n\n0x0501ba60 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_cluster},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<389 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,6}]},{heap_size,6765},{total_heap_size,10946},{links,[<0.61.0>]},{memory,44220},{message_queue_len,0},{reductions,43850},{trap_exit,false}]\ny(1) <0.63.0>\n\n0x0501ba6c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,dist_manager},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<411 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.61.0>]},{memory,1368},{message_queue_len,0},{reductions,132},{trap_exit,false}]\ny(1) <0.62.0>\n\n0x0501ba78 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_server_cluster_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1184 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,987},{links,[<0.62.0>,<0.64.0>,<0.71.0>,<0.63.0>,<0.60.0>]},{memory,4464},{message_queue_len,0},{reductions,2722},{trap_exit,true}]\ny(1) <0.61.0>\n\n0x0501ba84 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<232 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.59.0>,<0.61.0>]},{memory,1328},{message_queue_len,0},{reductions,50},{trap_exit,true}]\ny(1) <0.60.0>\n\n0x0501ba90 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<739 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<0.7.0>,<0.60.0>]},{memory,1964},{message_queue_len,0},{reductions,44},{trap_exit,true}]\ny(1) <0.59.0>\n\n0x0501ba9c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,timer_server},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<376 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,6531}]},{heap_size,1597},{total_heap_size,1974},{links,[<0.121.0>,<0.152.0>,<0.322.0>,<0.609.0>,<0.262.0>,<0.125.0>,<0.90.0>,<0.93.0>,<0.110.0>,<0.72.0>,<0.79.0>,<0.34.0>]},{memory,8552},{message_queue_len,0},{reductions,1966544},{trap_exit,true}]\ny(1) <0.57.0>\n\n0x0501baa8 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,memsup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<526 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,35}]},{heap_size,1597},{total_heap_size,1974},{links,[<0.52.0>]},{memory,8332},{message_queue_len,0},{reductions,134769},{trap_exit,true}]\ny(1) <0.55.0>\n\n0x0501bab4 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,disksup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<473 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,11}]},{heap_size,2584},{total_heap_size,5168},{links,[<0.52.0>]},{memory,21108},{message_queue_len,0},{reductions,58502},{trap_exit,true}]\ny(1) <0.54.0>\n\n0x0501bac0 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,os_mon_sysinfo},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<411 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,18}]},{heap_size,2584},{total_heap_size,3194},{links,[<0.52.0>,#Port<0.1438>]},{memory,13232},{message_queue_len,0},{reductions,6883},{trap_exit,true}]\ny(1) <0.53.0>\n\n0x0501bacc Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,os_mon_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<828 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,754},{links,[<0.53.0>,<0.54.0>,<0.55.0>,<0.51.0>]},{memory,3512},{message_queue_len,0},{reductions,274},{trap_exit,true}]\ny(1) <0.52.0>\n\n0x0501bad8 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<229 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.50.0>,<0.52.0>]},{memory,1328},{message_queue_len,0},{reductions,40},{trap_exit,true}]\ny(1) <0.51.0>\n\n0x0501bae4 Return addr 0x04c0a98c 
(diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<568 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.7.0>,<0.51.0>]},{memory,1388},{message_queue_len,0},{reductions,23},{trap_exit,true}]\ny(1) <0.50.0>\n\n0x0501baf0 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<351 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.32.0>]},{memory,1348},{message_queue_len,0},{reductions,14},{trap_exit,false}]\ny(1) <0.48.0>\n\n0x0501bafc Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<223 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.32.0>]},{memory,1308},{message_queue_len,0},{reductions,8},{trap_exit,false}]\ny(1) <0.47.0>\n\n0x0501bb08 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,release_handler},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<645 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,5}]},{heap_size,610},{total_heap_size,987},{links,[<0.41.0>]},{memory,4384},{message_queue_len,0},{reductions,1249},{trap_exit,false}]\ny(1) <0.45.0>\n\n0x0501bb14 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,overload},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<433 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.42.0>]},{memory,1368},{message_queue_len,0},{reductions,39},{trap_exit,false}]\ny(1) <0.44.0>\n\n0x0501bb20 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,alarm_handler},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<438 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.42.0>]},{memory,1368},{message_queue_len,0},{reductions,43},{trap_exit,true}]\ny(1) <0.43.0>\n\n0x0501bb2c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,sasl_safe_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<748 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,233},{total_heap_size,610},{links,[<0.43.0>,<0.44.0>,<0.41.0>]},{memory,2916},{message_queue_len,0},{reductions,174},{trap_exit,true}]\ny(1) <0.42.0>\n\n0x0501bb38 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,sasl_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<774 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,233},{total_heap_size,610},{links,[<0.42.0>,<0.45.0>,<0.40.0>]},{memory,2916},{message_queue_len,0},{reductions,158},{trap_exit,true}]\ny(1) <0.41.0>\n\n0x0501bb44 Return addr 0x04c0a98c 
(diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<246 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.39.0>,<0.41.0>]},{memory,1328},{message_queue_len,0},{reductions,70},{trap_exit,true}]\ny(1) <0.40.0>\n\n0x0501bb50 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<700 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.7.0>,<0.40.0>]},{memory,1388},{message_queue_len,0},{reductions,23},{trap_exit,true}]\ny(1) <0.39.0>\n\n0x0501bb5c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,kernel_safe_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1044 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,610},{total_heap_size,987},{links,[<0.130.0>,<0.134.0>,<0.135.0>,<0.57.0>,<0.129.0>,<0.11.0>]},{memory,4484},{message_queue_len,0},{reductions,388},{trap_exit,true}]\ny(1) <0.34.0>\n\n0x0501bb68 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<385 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.11.0>]},{memory,1368},{message_queue_len,0},{reductions,268},{trap_exit,true}]\ny(1) <0.33.0>\n\n0x0501bb74 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<404 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,4}]},{heap_size,2584},{total_heap_size,20295},{links,[<0.47.0>,<0.48.0>,<0.31.0>]},{memory,81656},{message_queue_len,0},{reductions,5170},{trap_exit,true}]\ny(1) <0.32.0>\n\n0x0501bb80 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,user},{status,waiting},{initial_call,{user,server,2}},{backtrace,<<728 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,22}]},{heap_size,2584},{total_heap_size,6765},{links,[<0.29.0>,<0.32.0>,#Port<0.830>,<0.6.0>]},{memory,27596},{message_queue_len,0},{reductions,30017},{trap_exit,true}]\ny(1) <0.31.0>\n\n0x0501bb8c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<441 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,1597},{total_heap_size,1597},{links,[<0.11.0>,<0.31.0>]},{memory,6844},{message_queue_len,0},{reductions,166},{trap_exit,true}]\ny(1) <0.29.0>\n\n0x0501bb98 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,standard_error},{status,waiting},{initial_call,{standard_error,server,2}},{backtrace,<<187 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.27.0>,#Port<0.792>]},{memory,1388},{message_queue_len,0},{reductions,7},{trap_exit,true}]\ny(1) <0.28.0>\n\n0x0501bba4 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,standard_error_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<464 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.11.0>,<0.28.0>]},{memory,1388},{message_queue_len,0},{reductions,40},{trap_exit,true}]\ny(1) <0.27.0>\n\n0x0501bbb0 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,code_server},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<2875 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,421}]},{heap_size,6765},{total_heap_size,24476},{links,[<0.11.0>]},{memory,98280},{message_queue_len,0},{reductions,262009},{trap_exit,true}]\ny(1) <0.26.0>\n\n0x0501bbbc Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,file_server_2},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<398 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,4406}]},{heap_size,4181},{total_heap_size,8362},{links,[#Port<0.496>,<0.11.0>]},{memory,33904},{message_queue_len,0},{reductions,4147997},{trap_exit,true}]\ny(1) <0.25.0>\n\n0x0501bbc8 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,global_group},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<456 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.11.0>]},{memory,1368},{message_queue_len,0},{reductions,76},{trap_exit,true}]\ny(1) <0.24.0>\n\n0x0501bbd4 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{net_kernel,ticker,2}},{backtrace,<<194 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.21.0>]},{memory,1308},{message_queue_len,0},{reductions,441},{trap_exit,false}]\ny(1) <0.23.0>\n\n0x0501bbe0 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{inet_tcp_dist,accept_loop,2}},{backtrace,<<387 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,139}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.21.0>]},{memory,12260},{message_queue_len,0},{reductions,126016},{trap_exit,false}]\ny(1) <0.22.0>\n\n0x0501bbec Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,net_kernel},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<652 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,114}]},{heap_size,1597},{total_heap_size,1974},{links,[<0.23.0>,<0.449.0>,<0.5157.0>,<0.18.0>,<0.22.0>,#Port<0.460>]},{memory,8448},{message_queue_len,0},{reductions,33199},{trap_exit,true}]\ny(1) <0.21.0>\n\n0x0501bbf8 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) 
[{registered_name,auth},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<397 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,26}]},{heap_size,377},{total_heap_size,754},{links,[<0.18.0>]},{memory,3452},{message_queue_len,0},{reductions,3678},{trap_exit,true}]\ny(1) <0.20.0>\n\n0x0501bc04 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,erl_epmd},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<409 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.18.0>,#Port<0.473>]},{memory,1388},{message_queue_len,0},{reductions,135},{trap_exit,false}]\ny(1) <0.19.0>\n\n0x0501bc10 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,net_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<870 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,987},{links,[<0.19.0>,<0.20.0>,<0.21.0>,<0.11.0>]},{memory,4444},{message_queue_len,0},{reductions,265},{trap_exit,true}]\ny(1) <0.18.0>\n\n0x0501bc1c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,inet_db},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<498 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,24}]},{heap_size,377},{total_heap_size,754},{links,[<0.11.0>]},{memory,3452},{message_queue_len,0},{reductions,2423},{trap_exit,true}]\ny(1) <0.17.0>\n\n0x0501bc28 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<176 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,610},{total_heap_size,987},{links,[<0.13.0>]},{memory,4324},{message_queue_len,0},{reductions,249},{trap_exit,false}]\ny(1) <0.16.0>\n\n0x0501bc34 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<297 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.13.0>]},{memory,1308},{message_queue_len,0},{reductions,4},{trap_exit,false}]\ny(1) <0.15.0>\n\n0x0501bc40 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<339 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,5}]},{heap_size,987},{total_heap_size,1364},{links,[<0.13.0>]},{memory,5832},{message_queue_len,0},{reductions,760},{trap_exit,true}]\ny(1) <0.14.0>\n\n0x0501bc4c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,global_name_server},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<538 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,500}]},{heap_size,1597},{total_heap_size,1974},{links,[<0.14.0>,<0.16.0>,<0.15.0>,<0.11.0>]},{memory,8472},{message_queue_len,0},{reductions,163826},{trap_exit,true}]\ny(1) <0.13.0>\n\n0x0501bc58 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 
88)\ny(0) [{registered_name,rex},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<446 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,344}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.11.0>]},{memory,12360},{message_queue_len,0},{reductions,57712},{trap_exit,true}]\ny(1) <0.12.0>\n\n0x0501bc64 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,kernel_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1623 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,4181},{total_heap_size,8362},{links,[<0.26.0>,<0.29.0>,<0.33.0>,<0.34.0>,<0.27.0>,<0.17.0>,<0.24.0>,<0.25.0>,<0.18.0>,<0.12.0>,<0.13.0>,<0.10.0>]},{memory,34104},{message_queue_len,0},{reductions,3131},{trap_exit,true}]\ny(1) <0.11.0>\n\n0x0501bc70 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<228 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.9.0>,<0.11.0>]},{memory,1328},{message_queue_len,0},{reductions,72},{trap_exit,true}]\ny(1) <0.10.0>\n\n0x0501bc7c Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1414 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,754},{links,[<0.7.0>,<0.10.0>]},{memory,3472},{message_queue_len,0},{reductions,44},{trap_exit,true}]\ny(1) <0.9.0>\n\n0x0501bc88 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,application_controller},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<566 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,32}]},{heap_size,4181},{total_heap_size,21892},{links,[<0.50.0>,<0.113.0>,<0.427.0>,<0.59.0>,<0.9.0>,<0.39.0>,<0.0.0>]},{memory,88124},{message_queue_len,0},{reductions,42916},{trap_exit,true}]\ny(1) <0.7.0>\n\n0x0501bc94 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,error_logger},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<553 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,233}]},{heap_size,4181},{total_heap_size,10946},{links,[<0.0.0>,<0.31.0>,#Port<0.1578>]},{memory,44260},{message_queue_len,0},{reductions,1290627},{trap_exit,true}]\ny(1) <0.6.0>\n\n0x0501bca0 Return addr 0x04c0a98c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,erl_prim_loader},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<620 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,528}]},{heap_size,4181},{total_heap_size,15127},{links,[#Port<0.1>,<0.0.0>]},{memory,60904},{message_queue_len,0},{reductions,1327391},{trap_exit,true}]\ny(1) <0.3.0>\n\n0x0501bcac Return addr 0x04c09b74 (diag_handler:do_diag_per_node/0 + 112)\ny(0) [{registered_name,init},{status,waiting},{initial_call,{otp_ring0,start,2}},{backtrace,<<830 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,544}]},{heap_size,2584},{total_heap_size,4181},{links,[<0.6.0>,<0.7.0>,<0.3.0>]},{memory,17140},{message_queue_len,0},{reductions,130426},{trap_exit,true}]\ny(1) <0.0.0>\n\n0x0501bcb8 Return addr 0x032ca77c (rpc:'-handle_call_call/6-fun-0-'/5 + 104)\ny(0) []\ny(1) []\ny(2) [{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{inets,\"5.2\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{system_arch,\"windows\"},{wall_clock,3299},{memory_data,{4284698624,4184846336,{<0.299.0>,6656756}}},{disk_data,[{\"C:\\\",48162864,60},{\"D:\\\",51279476,0},{\"G:\\\",34724465,17}]}]\ny(3) [{{node,'ns_1@10.2.1.101',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.102',memcached},[{port,11210},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{otp,[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{cookie,pmqchiglstnppkwf}]},{memory_quota,3268},{{node,'ns_1@10.2.1.102',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.100',membership},active},{rebalance_status,{none,<<76 bytes>>}},{{node,'ns_1@10.2.1.101',membership},active},{rest_creds,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{creds,[{\"Administrator\",[{password,'filtered-out'}]}]}]},{buckets,[{'_vclock',[{'ns_1@10.2.1.100',{9,63461309965}}]},{configs,[{\"default\",[{num_replicas,1},{ram_quota,3426746368},{auth_type,sasl},{sasl_password,[]},{type,membase},{num_vbuckets,1024},{ht_size,3079},{tap_keepalive,0},{tap_noop_interval,20},{max_txn_size,1000},{ht_locks,5},{servers,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{map,[['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','n
s_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined
],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],[
'ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns
_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_
1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@1
0.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10
.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.10
1'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100
','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],
['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','n
s_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101']]}]}]}]},{port_servers,[{moxi,\"./bin/moxi/moxi\",[\"-Z\",{\"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",[port]},\"-z\",{\"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming\",[{rest,port}]},\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",{\"~s\",[{ns_moxi_sup,rest_user,[]}]}},{\"MOXI_SASL_PLAIN_PWD\",{\"~s\",[{ns_moxi_sup,rest_pass,[]}]}}]},use_stdio,stderr_to_stdout,stream]},{memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\",{\"~B\",[port]},\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",{\"admin=~s;default_bucket_name=default;auto_create=false\",[admin_user]},{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",{\"~s\",[{isasl,path}]}},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]}]},{alerts,[{email,[]},{email_alerts,false},{email_server,[{user,undefined},{pass,'filtered-out'},{addr,undefined},{port,undefined},{encrypt,false}]},{alerts,[server_down,server_unresponsive,server_up,server_joined,server_left,bucket_created,bucket_deleted,bucket_auth_failed]}]},{nodes_wanted,[{'_vclock',[{'ns_1@10.2.1.100',{2,63461308289}}]},'ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{rest,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{port,8091}]},{{node,'ns_1@10.2.1.102',membership},active},{{node,'ns_1@10.2.1.100',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.101',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.102',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{moxi,[{port,11211},{verbosity,[]}]},{replication,[{enabled,true}]},{{node,'ns_1@10.2.1.100',memcached},[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307259}}]},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.101',memcached},[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{dbdir,\"c:/Program 
Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.100',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]}]\ny(4) [\"bucket_engine_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r\",\"curl-7.21.1-w64_patched.tar.gz\r\",\"ep-engine_1.6.5r_4_g9d25ede-MINGW32_NT-6.0.i686.tar.gz\r\",\"libconflate_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"libevent-2.0.7-rc.tar.gz\r\",\"libmemcached-0.41_trond-norbye_mingw32-revno895.tar.gz\r\",\"libvbucket_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"membase-cli_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"memcached_1.4.4_359_g06c7d3b-MINGW32_NT-6.0.i686.tar.gz\r\",\"moxi_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"ns_server_1.6.5r.tar.gz\r\",\"pthreads-w64-2-8-0-release.tar.gz\r\",\"vbucketmigrator_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r\",\"wallace_1.6.5r-2-gc6cf01c-win64-201012280140\r\"]\ny(5) [{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{inets,\"5.2\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]\n\n0x0501bcd4 Return addr 0x00bdc194 ()\ny(0) Catch 0x032ca77c (rpc:'-handle_call_call/6-fun-0-'/5 + 104)\ny(1) []\ny(2) []\ny(3) []\ny(4) <0.12.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,28657}, {total_heap_size,28657}, {links,[]}, {memory,115024}, {message_queue_len,0}, {reductions,18492}, {trap_exit,false}]}]}, {memory,{4284698624,4184846336,{<0.299.0>,6656756}}}, {disk, [{"C:\\",48162864,60}, {"D:\\",51279476,0}, {"G:\\",34724465,17}]}]}, {'ns_1@10.2.1.101', [{version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {manifest, ["bucket_engine_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r", "curl-7.21.1-w64_patched.tar.gz\r", "ep-engine_1.6.5r_4_g9d25ede-MINGW32_NT-6.0.i686.tar.gz\r", "libconflate_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r", "libevent-2.0.7-rc.tar.gz\r", "libmemcached-0.41_trond-norbye_mingw32-revno895.tar.gz\r", "libvbucket_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r", "membase-cli_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r", "memcached_1.4.4_359_g06c7d3b-MINGW32_NT-6.0.i686.tar.gz\r", "moxi_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r", "ns_server_1.6.5r.tar.gz\r", "pthreads-w64-2-8-0-release.tar.gz\r", "vbucketmigrator_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r", "wallace_1.6.5r-2-gc6cf01c-win64-201012280140\r"]}, {config, [{{node,'ns_1@10.2.1.101',ns_log}, [{filename, "c:/Program Files/Membase/Server/data/ns_1/ns_log"}]}, {{node,'ns_1@10.2.1.102',memcached}, [{port,11210}, {dbdir,"c:/Program Files/Membase/Server/data/ns_1"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"./bin/bucket_engine/bucket_engine.so"}, {engines, [{membase, [{engine,"bin/ep_engine/ep.so"}, {initfile,"priv/init.sql"}]}, {memcached, [{engine,"bin/memcached/default_engine.so"}]}]}, {verbosity,[]}]}, {otp, [{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]}, {cookie,pmqchiglstnppkwf}]}, {memory_quota,3268}, {{node,'ns_1@10.2.1.102',ns_log}, [{filename, "c:/Program Files/Membase/Server/data/ns_1/ns_log"}]}, {{node,'ns_1@10.2.1.100',membership},active}, {rebalance_status, {none, <<"Rebalance failed. See logs for detailed reason. 
You can try rebalance again.">>}}, {{node,'ns_1@10.2.1.101',membership},active}, {rest_creds, [{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]}, {creds, [{"Administrator",[{password,'filtered-out'}]}]}]}, {buckets, [{'_vclock',[{'ns_1@10.2.1.100',{9,63461309965}}]}, {configs, [{"default", [{num_replicas,1}, {ram_quota,3426746368}, {auth_type,sasl}, {sasl_password,[]}, {type,membase}, {num_vbuckets,1024}, {ht_size,3079}, {tap_keepalive,0}, {tap_noop_interval,20}, {max_txn_size,1000}, {ht_locks,5}, {servers, ['ns_1@10.2.1.100','ns_1@10.2.1.101', 'ns_1@10.2.1.102']}, {map, [['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], 
['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], 
['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], 
['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101']]}]}]}]}, {port_servers, [{moxi,"./bin/moxi/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR", {"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,stderr_to_stdout,stream]}, {memcached,"./bin/memcached/memcached", ["-X","./bin/memcached/stdin_term_handler.so","-p", {"~B",[port]}, "-E","./bin/bucket_engine/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,stream]}]}, {alerts, [{email,[]}, {email_alerts,false}, {email_server, [{user,undefined}, {pass,'filtered-out'}, {addr,undefined}, {port,undefined}, {encrypt,false}]}, {alerts, [server_down,server_unresponsive,server_up, server_joined,server_left,bucket_created, bucket_deleted,bucket_auth_failed]}]}, {nodes_wanted, [{'_vclock',[{'ns_1@10.2.1.100',{2,63461308289}}]}, 'ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']}, {rest, [{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]}, {port,8091}]}, {{node,'ns_1@10.2.1.102',membership},active}, {{node,'ns_1@10.2.1.100',isasl}, [{path, "c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}]}, {{node,'ns_1@10.2.1.101',isasl}, [{path, "c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}]}, {{node,'ns_1@10.2.1.102',isasl}, [{path, "c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}]}, {moxi,[{port,11211},{verbosity,[]}]}, {replication,[{enabled,true}]}, {{node,'ns_1@10.2.1.100',memcached}, [{'_vclock',[{'ns_1@10.2.1.100',{1,63461307259}}]}, {dbdir,"c:/Program Files/Membase/Server/data/ns_1"}, {port,11210}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"./bin/bucket_engine/bucket_engine.so"}, {engines, [{membase, [{engine,"bin/ep_engine/ep.so"}, {initfile,"priv/init.sql"}]}, {memcached, [{engine,"bin/memcached/default_engine.so"}]}]}, {verbosity,[]}]}, {{node,'ns_1@10.2.1.101',memcached}, [{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]}, {dbdir,"c:/Program Files/Membase/Server/data/ns_1"}, {port,11210}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"./bin/bucket_engine/bucket_engine.so"}, {engines, [{membase, [{engine,"bin/ep_engine/ep.so"}, {initfile,"priv/init.sql"}]}, {memcached, [{engine,"bin/memcached/default_engine.so"}]}]}, {verbosity,[]}]}, {{node,'ns_1@10.2.1.100',ns_log}, [{filename, "c:/Program Files/Membase/Server/data/ns_1/ns_log"}]}]}, {basic_info, [{version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, 
{stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,3232}, {memory_data, {4284698624,4210270208,{<11993.387.0>,5385512}}}, {disk_data, [{"C:\\",46243100,46}, {"D:\\",51809624,0}, {"G:\\",33929248,18}]}]}, {processes, [{<11993.0.0>, [{registered_name,init}, {status,waiting}, {initial_call,{otp_ring0,start,2}}, {backtrace, <<"Program counter: 0x00effcb8 (init:loop/1 + 20)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0464f65c Return addr 0x00aec194 ()\ny(0) {state,[{'-root',[<<37 bytes>>]},{'-progname',[<<3 bytes>>]},{'-home',[<<10 bytes>>]},{'-name',[<<15 bytes>>]},{'-pa',[<<18 bytes>>]},{'-pa',[<<29 bytes>>]},{'-pa',[<<32 bytes>>]},{'-pa',[<<46 bytes>>]},{'-pa',[<<32 bytes>>]},{'-setcookie',[<<8 bytes>>]},{'-ns_server',[<<19 bytes>>,<<6 bytes>>]},{'-ns_server',[<<24 bytes>>,<<8 bytes>>]},{'-ns_server',[<<24 bytes>>,<<2 bytes>>]},{'-kernel',[<<20 bytes>>,<<5 bytes>>,<<20 bytes>>,<<5 bytes>>]},{'-ns_server',[<<14 bytes>>,<<32 bytes>>]}],[],[[ns_bootstrap,override_resolver]],[{application_controller,<0.7.0>},{error_logger,<0.6.0>},{erl_prim_loader,<0.3.0>}],<0.2.0>,{started,started},{\"OTP APN 181 01\",\"R13B03\"},[],[]}\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,233}]}, {heap_size,1597}, {total_heap_size,3194}, {links,[<11993.6.0>,<11993.7.0>,<11993.3.0>]}, {memory,13192}, {message_queue_len,0}, {reductions,59884}, {trap_exit,true}]}, {<11993.3.0>, [{registered_name,erl_prim_loader}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x00f2fd88 (erl_prim_loader:loop/3 + 92)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d57394 Return addr 0x00aec194 ()\ny(0) []\ny(1) [\"bin/ns_server/deps/gen_smtp/ebin\",\"bin/ns_server/deps/menelaus/deps/mochiweb/ebin\",\"bin/ns_server/deps/menelaus/ebin\",\"bin/ns_server/deps/emoxi/ebin\",\"bin/ns_server/ebin\",\"C:\\PROGRA~1\\Membase\\Server\\bin\\erlang/lib/kernel-2.13.4/ebin\",\"C:\\PROGRA~1\\Membase\\Server\\bin\\erlang/lib/stdlib-1.16.4/ebin\"]\ny(2) <0.2.0>\ny(3) {state,efile,[],none,#Port<0.1>,infinity,undefined,true,{prim_state,false,undefined,undefined}}\ny(4) infinity\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,519}]}, {heap_size,1597}, {total_heap_size,12543}, {links,[#Port<11993.1>,<11993.0.0>]}, {memory,50568}, {message_queue_len,0}, {reductions,1323920}, {trap_exit,true}]}, {<11993.6.0>, [{registered_name,error_logger}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00f24db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x053e2d58 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,ns_log_mf_h,false,{state,\"logs\",10485760,10,998126,1,{file_descriptor,prim_file,{#Port<0.1578>,696}},[],#Fun},false},{handler,error_logger,false,[],false}]\ny(3) error_logger\ny(4) <0.2.0>\n\n0x053e2d70 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,43}]}, {heap_size,2584}, {total_heap_size,20295}, {links,[<11993.0.0>,<11993.31.0>,#Port<11993.1578>]}, {memory,81656}, {message_queue_len,0}, {reductions,388522}, {trap_exit,true}]}, {<11993.7.0>, [{registered_name,application_controller}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0309bcc8 Return addr 0x00aec194 ()\ny(0) []\ny(1) 
infinity\ny(2) application_controller\ny(3) {state,[],[],[],[{mnesia,<0.317.0>},{inets,<0.237.0>},{ns_server,<0.59.0>},{os_mon,<0.50.0>},{sasl,<0.39.0>},{stdlib,undefined},{kernel,<0.9.0>}],[],[{mnesia,temporary},{inets,temporary},{ns_server,temporary},{os_mon,temporary},{sasl,temporary},{stdlib,permanent},{kernel,permanent}],[],[]}\ny(4) application_controller\ny(5) <0.2.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,32}]}, {heap_size,4181}, {total_heap_size,21892}, {links, [<11993.50.0>,<11993.237.0>,<11993.317.0>, <11993.59.0>,<11993.9.0>,<11993.39.0>,<11993.0.0>]}, {memory,88124}, {message_queue_len,0}, {reductions,43037}, {trap_exit,true}]}, {<11993.9.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0325fdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fcd4f4 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.10.0>,{appl_data,kernel,[application_controller,erl_reply,auth,boot_server,code_server,disk_log_server,disk_log_sup,erl_prim_loader,error_logger,file_server_2,fixtable_server,global_group,global_name_server,heart,init,kernel_config,kernel_sup,net_kernel,net_sup,rex,user,os_server,ddll_server,erl_epmd,inet_db,pg2],undefined,{kernel,[]},[application,application_controller,application_master,application_starter,auth,code,packages,code_server,dist_util,erl_boot_server,erl_distribution,erl_reply,error_handler,error_logger,file,file_server,file_io_server,global,global_group,global_search,group,heart,hipe_unified_loader,inet6_tcp,inet6_tcp_dist,inet6_udp,inet6_sctp,inet_config,inet_hosts,inet_gethost_native,inet_tcp_dist,kernel,kernel_config,net,net_adm,net_kernel,os,ram_file,rpc,user,user_drv,user_sup,disk_log,disk_log_1,disk_log_server,disk_log_sup,dist_ac,erl_ddll,erl_epmd,erts_debug,gen_tcp,gen_udp,gen_sctp,inet,inet_db,inet_dns,inet_parse,inet_res,inet_tcp,inet_udp,inet_sctp,pg2,seq_trace,standard_error,wrap_log_reader],[],infinity,infinity},[],0,<0.0.0>}\ny(2) <0.7.0>\n\n0x00fcd504 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<11993.7.0>,<11993.10.0>]}, {memory,3472}, {message_queue_len,0}, {reductions,44}, {trap_exit,true}]}, {<11993.10.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032610fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fcf8f8 Return addr 0x00aec194 ()\ny(0) []\ny(1) kernel\ny(2) <0.11.0>\ny(3) <0.9.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.9.0>,<11993.11.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,72}, {trap_exit,true}]}, {<11993.11.0>, [{registered_name,kernel_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f86b50 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,kernel_sup},one_for_all,[{child,<0.34.0>,kernel_safe_sup,{supervisor,start_link,[{local,kernel_safe_sup},kernel,safe]},permanent,infinity,supervisor,[kernel]},{child,<0.33.0>,kernel_config,{kernel_config,start_link,[]},permanent,2000,worker,[kernel_config]},{child,<0.29.0>,user,{user_sup,start,[]},temporary,2000,supervisor,[user_sup]},{child,<0.27.0>,standard_error,{standard_error,start_link,[]},temporary,2000,supervisor,[user_sup]},{child,<0.26.0>,code_server,{code,start_link,[]},permanent,2000,worker,[code]},{child,<0.25.0>,file_server_2,{file_server,start_link,[]},permanent,2000,worker,[file,file_server,file_io_server,prim_file]},{child,<0.24.0>,global_group,{global_group,start_link,[]},permanent,2000,worker,[global_group]},{child,<0.18.0>,net_sup,{erl_distribution,start_link,[]},permanent,infinity,supervisor,[erl_distribution]},{child,<0.17.0>,inet_db,{inet_db,start_link,[]},permanent,2000,worker,[inet_db]},{child,<0.13.0>,global_name_server,{global,start_link,[]},permanent,2000,worker,[global]},{child,<0.12.0>,rex,{rpc,start_link,[]},permanent,2000,worker,[rpc]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,1,[],kernel,[]}\ny(4) kernel_sup\ny(5) <0.10.0>\n\n0x00f86b6c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,4181}, {total_heap_size,8362}, {links, [<11993.26.0>,<11993.29.0>,<11993.33.0>,<11993.34.0>, <11993.27.0>,<11993.17.0>,<11993.24.0>,<11993.25.0>, <11993.18.0>,<11993.12.0>,<11993.13.0>,<11993.10.0>]}, {memory,34104}, {message_queue_len,0}, {reductions,3131}, {trap_exit,true}]}, {<11993.12.0>, [{registered_name,rex}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030788b0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) rpc\ny(3) {1,{<0.8906.0>,{<11585.12006.0>,{#Ref<11585.0.0.246339>,'ns_1@10.2.1.101'}},nil,nil}}\ny(4) rex\ny(5) <0.11.0>\n\n0x030788cc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,349}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<11993.11.0>]}, {memory,5996}, {message_queue_len,0}, {reductions,63716}, {trap_exit,true}]}, {<11993.13.0>, [{registered_name,global_name_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030958e4 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) global\ny(3) {state,true,['ns_1@10.2.1.100','ns_1@10.2.1.102'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],[],[],'nonode@nohost',<0.14.0>,<0.15.0>,<0.16.0>,no_trace,false}\ny(4) global_name_server\ny(5) <0.11.0>\n\n0x03095900 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,347}]}, {heap_size,2584}, {total_heap_size,2961}, {links, [<11993.14.0>,<11993.16.0>,<11993.15.0>,<11993.11.0>]}, {memory,12452}, {message_queue_len,0}, {reductions,86955}, {trap_exit,true}]}, {<11993.14.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 
0x0329b31c (global:loop_the_locker/1 + 588)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fb9820 Return addr 0x0329b0b0 (global:init_the_locker/1 + 192)\ny(0) {multi,[],[],['ns_1@10.2.1.102','ns_1@10.2.1.100'],'ns_1@10.2.1.102',false,false}\ny(1) infinity\n\n0x00fb982c Return addr 0x00aec194 ()\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,610}, {total_heap_size,987}, {links,[<11993.13.0>]}, {memory,4324}, {message_queue_len,0}, {reductions,312}, {trap_exit,true}]}, {<11993.15.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x0329f3c8 (global:collect_deletions/2 + 76)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fb1f6c Return addr 0x0329f348 (global:loop_the_deleter/1 + 36)\ny(0) infinity\ny(1) []\ny(2) <0.13.0>\n\n0x00fb1f7c Return addr 0x00aec194 ()\ny(0) <0.13.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,377}, {total_heap_size,754}, {links,[<11993.13.0>]}, {memory,3392}, {message_queue_len,0}, {reductions,232}, {trap_exit,false}]}, {<11993.16.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x0329f4e8 (global:loop_the_registrar/0 + 12)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03093098 Return addr 0x00aec194 ()\ny(0) []\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,610}, {total_heap_size,987}, {links,[<11993.13.0>]}, {memory,4324}, {message_queue_len,0}, {reductions,249}, {trap_exit,false}]}, {<11993.17.0>, [{registered_name,inet_db}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030846f0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) inet_db\ny(3) {state,inet_db,inet_cache,inet_hosts_byname,inet_hosts_byaddr,inet_hosts_file_byname,inet_hosts_file_byaddr,#Ref<0.0.0.8>}\ny(4) inet_db\ny(5) <0.11.0>\n\n0x0308470c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,37}]}, {heap_size,377}, {total_heap_size,754}, {links,[<11993.11.0>]}, {memory,3452}, {message_queue_len,0}, {reductions,3586}, {trap_exit,true}]}, {<11993.18.0>, [{registered_name,net_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fc7898 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,net_sup},one_for_all,[{child,<0.21.0>,net_kernel,{net_kernel,start_link,[['ns_1@10.2.1.101',longnames]]},permanent,2000,worker,[net_kernel]},{child,<0.20.0>,auth,{auth,start_link,[]},permanent,2000,worker,[auth]},{child,<0.19.0>,erl_epmd,{erl_epmd,start_link,[]},permanent,2000,worker,[erl_epmd]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,1,[],erl_distribution,['ns_1@10.2.1.101',longnames]}\ny(4) net_sup\ny(5) <0.11.0>\n\n0x00fc78b4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,377}, {total_heap_size,987}, {links, 
[<11993.19.0>,<11993.20.0>,<11993.21.0>,<11993.11.0>]}, {memory,4444}, {message_queue_len,0}, {reductions,265}, {trap_exit,true}]}, {<11993.19.0>, [{registered_name,erl_epmd}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fb33e8 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) erl_epmd\ny(3) {state,#Port<0.473>,21100,ns_1}\ny(4) erl_epmd\ny(5) <0.18.0>\n\n0x00fb3404 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.18.0>,#Port<11993.473>]}, {memory,1388}, {message_queue_len,0}, {reductions,135}, {trap_exit,false}]}, {<11993.20.0>, [{registered_name,auth}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0563ab30 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) auth\ny(3) {state,pmqchiglstnppkwf,12}\ny(4) auth\ny(5) <0.18.0>\n\n0x0563ab4c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,26}]}, {heap_size,377}, {total_heap_size,754}, {links,[<11993.18.0>]}, {memory,3452}, {message_queue_len,0}, {reductions,3702}, {trap_exit,true}]}, {<11993.21.0>, [{registered_name,net_kernel}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0455fbf4 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) net_kernel\ny(3) {state,'ns_1@10.2.1.101','ns_1@10.2.1.101',longnames,{tick,<0.23.0>,15000},7000,sys_dist,[{<0.3488.0>,'ns_1@10.2.1.102'},{<0.267.0>,'ns_1@10.2.1.100'}],[],[{listen,#Port<0.460>,<0.22.0>,{net_address,{{0,0,0,0},21100},\"WIN-T7EVP273ON8\",tcp,inet},inet_tcp_dist}],[],0,all}\ny(4) net_kernel\ny(5) <0.18.0>\n\n0x0455fc10 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,106}]}, {heap_size,2584}, {total_heap_size,2961}, {links, [<11993.23.0>,<11993.267.0>,<11993.3488.0>, <11993.18.0>,<11993.22.0>,#Port<11993.460>]}, {memory,12396}, {message_queue_len,0}, {reductions,33168}, {trap_exit,true}]}, {<11993.22.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{inet_tcp_dist,accept_loop,2}}, {backtrace, <<"Program counter: 0x00f192d8 (prim_inet:accept0/2 + 92)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x053e04ec Return addr 0x032c8894 (inet_tcp:accept/1 + 20)\ny(0) 14673\ny(1) #Port<0.460>\n\n0x053e04f8 Return addr 0x032c2550 (inet_tcp_dist:accept_loop/2 + 48)\ny(0) []\n\n0x053e0500 Return addr 0x00aec194 ()\ny(0) []\ny(1) #Port<0.460>\ny(2) <0.21.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,137}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<11993.21.0>]}, {memory,12260}, {message_queue_len,0}, {reductions,121386}, {trap_exit,false}]}, {<11993.23.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{net_kernel,ticker,2}}, {backtrace, <<"Program counter: 0x032d2384 (net_kernel:ticker_loop/2 + 28)\nCP: 0x00000000 (invalid)\narity = 
0\n\n0x00f707a8 Return addr 0x00aec194 ()\ny(0) 15000\ny(1) <0.21.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.21.0>]}, {memory,1308}, {message_queue_len,0}, {reductions,433}, {trap_exit,false}]}, {<11993.24.0>, [{registered_name,global_group}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f797c0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) global_group\ny(3) {state,no_conf,true,[],[],[],[],[],'ns_1@10.2.1.101',[],normal,normal}\ny(4) global_group\ny(5) <0.11.0>\n\n0x00f797dc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.11.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,76}, {trap_exit,true}]}, {<11993.25.0>, [{registered_name,file_server_2}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bf0268 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) file_server\ny(3) #Port<0.496>\ny(4) file_server_2\ny(5) <0.11.0>\n\n0x04bf0284 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,587}]}, {heap_size,1597}, {total_heap_size,3194}, {links,[#Port<11993.496>,<11993.11.0>]}, {memory,13232}, {message_queue_len,0}, {reductions,3422635}, {trap_exit,true}]}, {<11993.26.0>, [{registered_name,code_server}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x032f727c (code_server:loop/1 + 64)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x053fdde8 Return addr 0x00aec194 ()\ny(0) 
{state,<0.11.0>,\"c:/PROGRA~1/Membase/Server/bin/erlang\",[\"bin/ns_server/deps/gen_smtp/ebin\",\"bin/ns_server/deps/menelaus/deps/mochiweb/ebin\",\"bin/ns_server/deps/menelaus/ebin\",\"bin/ns_server/ebin\",\".\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/kernel-2.13.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/stdlib-1.16.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/xmerl-1.2.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/webtool-0.8.5/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/typer-0.1.7.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/tv-2.1.4.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/tools-2.6.5/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/test_server-3.3.5/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/syntax_tools-1.6.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/ssl-3.10.7/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/ssh-1.1.7/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/snmp-4.15/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/sasl-2.1.8/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/runtime_tools-1.8.2/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/reltool-0.5.2/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/public_key-0.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/pman-2.7.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/percept-0.8.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/parsetools-2.0.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/otp_mibs-1.0.6/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/os_mon-2.2.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/odbc-2.10.6/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/observer-0.9.8.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/mnesia-4.4.12/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/inviso-0.6.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/inets-5.2/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/hipe-3.7.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/eunit-2.1.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/et-1.3.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/erts-5.7.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/erl_interface-3.6.4\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/erl_docgen-0.1\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/dialyzer-2.1.0/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/debugger-3.2.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/crypto-1.6.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/compiler-4.6.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/common_test-1.4.6/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/appmon-2.1.10.2/ebin\",\"c:/Program Files/Membase/Server/bin/ns_server/deps/menelaus/deps/erlwsh/ebin\"],4111,8208,no_cache,interactive,[]}\ny(1) <0.11.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,350}]}, {heap_size,4181}, {total_heap_size,21892}, {links,[<11993.11.0>]}, {memory,87944}, {message_queue_len,0}, {reductions,245222}, {trap_exit,true}]}, {<11993.27.0>, [{registered_name,standard_error_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f791e8 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor_bridge\ny(3) {state,standard_error,<0.28.0>,<0.28.0>,{local,standard_error_sup}}\ny(4) standard_error_sup\ny(5) <0.11.0>\n\n0x00f79204 Return addr 0x00aec194 ()\ny(0) Catch 
0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.11.0>,<11993.28.0>]}, {memory,1388}, {message_queue_len,0}, {reductions,40}, {trap_exit,true}]}, {<11993.28.0>, [{registered_name,standard_error}, {status,waiting}, {initial_call,{standard_error,server,2}}, {backtrace, <<"Program counter: 0x032e196c (standard_error:server_loop/1 + 20)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f7e0a4 Return addr 0x00aec194 ()\ny(0) #Port<0.792>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.27.0>,#Port<11993.792>]}, {memory,1388}, {message_queue_len,0}, {reductions,7}, {trap_exit,true}]}, {<11993.29.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f7b0c0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor_bridge\ny(3) {state,user_sup,<0.31.0>,<0.31.0>,{<0.29.0>,user_sup}}\ny(4) <0.29.0>\ny(5) <0.11.0>\n\n0x00f7b0dc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,1597}, {total_heap_size,1597}, {links,[<11993.11.0>,<11993.31.0>]}, {memory,6844}, {message_queue_len,0}, {reductions,166}, {trap_exit,true}]}, {<11993.31.0>, [{registered_name,user}, {status,waiting}, {initial_call,{user,server,2}}, {backtrace, <<"Program counter: 0x0331dd88 (user:get_chars/8 + 176)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f74574 Return addr 0x0331aea0 (user:do_io_request/5 + 56)\ny(0) []\ny(1) []\ny(2) []\ny(3) unicode\ny(4) start\ny(5) {[],[]}\ny(6) #Port<0.830>\ny(7) {erl_scan,tokens,[1]}\ny(8) get_until\ny(9) io_lib\ny(10) [40,\"ns_1@10.2.1.101\",41,\"1\",62,32]\n\n0x00f745a4 Return addr 0x0331adc8 (user:server_loop/2 + 784)\ny(0) #Port<0.830>\ny(1) <0.31.0>\ny(2) <0.48.0>\n\n0x00f745b4 Return addr 0x0331a894 (user:catch_loop/3 + 56)\ny(0) #Port<0.830>\n\n0x00f745bc Return addr 0x00aec194 ()\ny(0) <0.32.0>\ny(1) #Port<0.830>\ny(2) Catch 0x0331a894 (user:catch_loop/3 + 56)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,5778}, {links, [<11993.29.0>,<11993.32.0>,#Port<11993.830>, <11993.6.0>]}, {memory,23648}, {message_queue_len,0}, {reductions,40326}, {trap_exit,true}]}, {<11993.32.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03e52924 (shell:get_command1/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f77b00 Return addr 0x03e522ec (shell:server_loop/7 + 148)\ny(0) []\ny(1) 12305\ny(2) []\ny(3) <0.47.0>\ny(4) <0.48.0>\n\n0x00f77b18 Return addr 0x00aec194 ()\ny(0) []\ny(1) []\ny(2) 1\ny(3) 20\ny(4) 20\ny(5) []\ny(6) 12305\ny(7) []\ny(8) 0\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,4}]}, {heap_size,2584}, {total_heap_size,20295}, {links,[<11993.47.0>,<11993.48.0>,<11993.31.0>]}, {memory,81656}, {message_queue_len,0}, {reductions,5170}, {trap_exit,true}]}, {<11993.33.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 
(invalid)\narity = 0\n\n0x00f7dcd8 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) kernel_config\ny(3) []\ny(4) <0.33.0>\ny(5) <0.11.0>\n\n0x00f7dcf4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.11.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,268}, {trap_exit,true}]}, {<11993.34.0>, [{registered_name,kernel_safe_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030737d4 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,kernel_safe_sup},one_for_one,[{child,<0.135.0>,dets,{dets_server,start_link,[]},permanent,2000,worker,[dets_server]},{child,<0.134.0>,dets_sup,{dets_sup,start_link,[]},permanent,1000,supervisor,[dets_sup]},{child,<0.130.0>,disk_log_server,{disk_log_server,start_link,[]},permanent,2000,worker,[disk_log_server]},{child,<0.129.0>,disk_log_sup,{disk_log_sup,start_link,[]},permanent,1000,supervisor,[disk_log_sup]},{child,<0.57.0>,timer_server,{timer,start_link,[]},permanent,1000,worker,[timer]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},4,3600,[],kernel,safe}\ny(4) kernel_safe_sup\ny(5) <0.11.0>\n\n0x030737f0 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,610}, {total_heap_size,987}, {links, [<11993.130.0>,<11993.134.0>,<11993.135.0>, <11993.57.0>,<11993.129.0>,<11993.11.0>]}, {memory,4484}, {message_queue_len,0}, {reductions,388}, {trap_exit,true}]}, {<11993.39.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0325fdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f7828c Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.40.0>,{appl_data,sasl,[sasl_sup,alarm_handler,overload,release_handler],undefined,{sasl,[]},[sasl,alarm_handler,format_lib_supp,misc_supp,overload,rb,rb_format_supp,release_handler,release_handler_1,erlsrv,sasl_report,sasl_report_tty_h,sasl_report_file_h,systools,systools_make,systools_rc,systools_relup,systools_lib],[],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x00f7829c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.7.0>,<11993.40.0>]}, {memory,1388}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<11993.40.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032610fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f78640 Return addr 0x00aec194 ()\ny(0) {state,tty,undefined}\ny(1) sasl\ny(2) <0.41.0>\ny(3) <0.39.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.39.0>,<11993.41.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,70}, {trap_exit,true}]}, 
{<11993.41.0>, [{registered_name,sasl_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f7b470 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,sasl_sup},one_for_one,[{child,<0.45.0>,release_handler,{release_handler,start_link,[]},permanent,2000,worker,[]},{child,<0.42.0>,sasl_safe_sup,{supervisor,start_link,[{local,sasl_safe_sup},sasl,safe]},permanent,infinity,supervisor,[sasl]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,1,[],sasl,[]}\ny(4) sasl_sup\ny(5) <0.40.0>\n\n0x00f7b48c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<11993.42.0>,<11993.45.0>,<11993.40.0>]}, {memory,2916}, {message_queue_len,0}, {reductions,158}, {trap_exit,true}]}, {<11993.42.0>, [{registered_name,sasl_safe_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f77ec8 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,sasl_safe_sup},one_for_one,[{child,<0.44.0>,overload,{overload,start_link,[]},permanent,2000,worker,[overload]},{child,<0.43.0>,alarm_handler,{alarm_handler,start_link,[]},permanent,2000,worker,dynamic}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},4,3600,[],sasl,safe}\ny(4) sasl_safe_sup\ny(5) <0.41.0>\n\n0x00f77ee4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<11993.43.0>,<11993.44.0>,<11993.41.0>]}, {memory,2916}, {message_queue_len,0}, {reductions,174}, {trap_exit,true}]}, {<11993.43.0>, [{registered_name,alarm_handler}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00f24db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8f924 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,alarm_handler,false,[{system_memory_high_watermark,[]}],false}]\ny(3) alarm_handler\ny(4) <0.42.0>\n\n0x00f8f93c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.42.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,43}, {trap_exit,true}]}, {<11993.44.0>, [{registered_name,overload}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8f570 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) overload\ny(3) {state,0,0,8.000000e-001,281,1.000000e-001,{0,0},clear}\ny(4) overload\ny(5) <0.42.0>\n\n0x00f8f58c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, 
[{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.42.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,39}, {trap_exit,false}]}, {<11993.45.0>, [{registered_name,release_handler}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8ea84 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) release_handler\ny(3) {state,[],\"C:\\PROGRA~1\\Membase\\Server\\bin\\erlang\",\"c:/PROGRA~1/Membase/Server/bin/erlang/releases\",[{release,\"OTP APN 181 01\",\"R13B03\",undefined,[],permanent}],undefined,{no_check,\"c:/PROGRA~1/Membase/Server/bin/erlang/bin/start\"},false,false,false,[]}\ny(4) release_handler\ny(5) <0.41.0>\n\n0x00f8eaa0 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,5}]}, {heap_size,610}, {total_heap_size,987}, {links,[<11993.41.0>]}, {memory,4384}, {message_queue_len,0}, {reductions,1249}, {trap_exit,false}]}, {<11993.47.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03e55484 (shell:eval_loop/3 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8fcdc Return addr 0x00aec194 ()\ny(0) []\ny(1) []\ny(2) 12305\ny(3) []\ny(4) <0.32.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.32.0>]}, {memory,1308}, {message_queue_len,0}, {reductions,8}, {trap_exit,false}]}, {<11993.48.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03eb72c8 (io:wait_io_mon_reply/2 + 28)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f7e444 Return addr 0x03eb6c0c (io:parse_erl_exprs/3 + 100)\ny(0) #Ref<0.0.0.44>\ny(1) <0.31.0>\n\n0x00f7e450 Return addr 0x03e5c180 (shell:'-get_command/5-fun-0-'/1 + 20)\ny(0) []\n\n0x00f7e458 Return addr 0x00aec194 ()\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.32.0>]}, {memory,1348}, {message_queue_len,0}, {reductions,14}, {trap_exit,false}]}, {<11993.50.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0325fdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8d8e4 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.51.0>,{appl_data,os_mon,[os_mon_sup,os_mon_sysinfo,disksup,memsup,cpu_sup,os_sup_server],undefined,{os_mon,[]},[os_mon,os_mon_mib,os_sup,disksup,memsup,cpu_sup,os_mon_sysinfo,nteventlog],[],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x00f8d8f4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.7.0>,<11993.51.0>]}, {memory,1388}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<11993.51.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032610fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8dc98 Return addr 0x00aec194 ()\ny(0) []\ny(1) os_mon\ny(2) <0.52.0>\ny(3) 
<0.50.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.50.0>,<11993.52.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,40}, {trap_exit,true}]}, {<11993.52.0>, [{registered_name,os_mon_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f89fb0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,os_mon_sup},one_for_one,[{child,<0.55.0>,memsup,{memsup,start_link,[]},permanent,2000,worker,[memsup]},{child,<0.54.0>,disksup,{disksup,start_link,[]},permanent,2000,worker,[disksup]},{child,<0.53.0>,os_mon_sysinfo,{os_mon_sysinfo,start_link,[]},permanent,2000,worker,[os_mon_sysinfo]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},5,3600,[],os_mon,[]}\ny(4) os_mon_sup\ny(5) <0.51.0>\n\n0x00f89fcc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links, [<11993.53.0>,<11993.54.0>,<11993.55.0>,<11993.51.0>]}, {memory,3512}, {message_queue_len,0}, {reductions,274}, {trap_exit,true}]}, {<11993.53.0>, [{registered_name,os_mon_sysinfo}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d425f0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) os_mon_sysinfo\ny(3) {state,#Port<0.1438>}\ny(4) os_mon_sysinfo\ny(5) <0.52.0>\n\n0x00d4260c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,29}]}, {heap_size,1597}, {total_heap_size,2207}, {links,[<11993.52.0>,#Port<11993.1438>]}, {memory,9284}, {message_queue_len,0}, {reductions,6665}, {trap_exit,true}]}, {<11993.54.0>, [{registered_name,disksup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d493f8 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) disksup\ny(3) {state,80,60000,{win32,nt},[{\"C:\\\",46243100,46},{\"D:\\\",51809624,0},{\"G:\\\",33929248,18}],not_used}\ny(4) disksup\ny(5) <0.52.0>\n\n0x00d49414 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,19}]}, {heap_size,377}, {total_heap_size,2961}, {links,[<11993.52.0>]}, {memory,12280}, {message_queue_len,0}, {reductions,57292}, {trap_exit,true}]}, {<11993.55.0>, [{registered_name,memsup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04fdf614 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) memsup\ny(3) {state,{win32,nt},false,{4210270208,4284698624},{<0.387.0>,5385512},false,60000,30000,8.000000e-001,5.000000e-002,<0.8908.0>,undefined,undefined,[],[]}\ny(4) memsup\ny(5) <0.52.0>\n\n0x04fdf630 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 
(proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,19}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<11993.52.0>]}, {memory,12280}, {message_queue_len,0}, {reductions,131412}, {trap_exit,true}]}, {<11993.57.0>, [{registered_name,timer_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x053f9c70 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) 42\ny(2) timer\ny(3) []\ny(4) timer_server\ny(5) <0.34.0>\n\n0x053f9c8c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1048}]}, {heap_size,1597}, {total_heap_size,1974}, {links, [<11993.279.0>,<11993.329.0>,<11993.8889.0>, <11993.8890.0>,<11993.349.0>,<11993.280.0>, <11993.325.0>,<11993.257.0>,<11993.264.0>, <11993.34.0>]}, {memory,8512}, {message_queue_len,0}, {reductions,298710}, {trap_exit,true}]}, {<11993.59.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0325fdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03049a94 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.60.0>,{appl_data,ns_server,[ns_server_sup,ns_config,ns_config_sup,ns_config_events,ns_node_disco,ns_node_disco_events],undefined,{ns_server,[]},[misc,ns_config,ns_config_default,ns_config_log,ns_config_sup,ns_config_rep,ns_log,ns_node_disco,ns_node_disco_conf_events,ns_node_disco_log,ns_port_init,ns_port_server,ns_port_sup,ns_server,ns_server_sup],[menelaus],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x03049aa4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<11993.7.0>,<11993.60.0>]}, {memory,1964}, {message_queue_len,0}, {reductions,46}, {trap_exit,true}]}, {<11993.60.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032610fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f789f0 Return addr 0x00aec194 ()\ny(0) []\ny(1) ns_server\ny(2) <0.61.0>\ny(3) <0.59.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.59.0>,<11993.61.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,50}, {trap_exit,true}]}, {<11993.61.0>, [{registered_name,ns_server_cluster_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030216a8 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,ns_server_cluster_sup},one_for_one,[{child,<0.256.0>,ns_server_sup,{ns_server_sup,start_link,[]},permanent,infinity,supervisor,[ns_server_sup]},{child,<0.64.0>,ns_config_sup,{ns_config_sup,start_link,[]},permanent,infinity,supervisor,[ns_config_sup]},{child,<0.63.0>,ns_cluster,{ns_cluster,start_link,[]},permanent,5000,worker,[ns_cluster]},{child,<0.62.0>,dist_manager,{dist_manager,start_link,[]},permanent,10,worker,[dist_manager]},{child,undefined,log_os_info,{log_os_info,start_link,[]},transient,10,worker,[log_os_info]},{child,undefined,ns_log_mf_h,{ns_log_mf_h,start_link,[]},transient,10,worker,[ns_log_mf_h]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,1,[],ns_server_cluster_sup,[]}\ny(4) ns_server_cluster_sup\ny(5) <0.60.0>\n\n0x030216c4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,377}, {total_heap_size,987}, {links, [<11993.62.0>,<11993.64.0>,<11993.256.0>,<11993.63.0>, <11993.60.0>]}, {memory,4464}, {message_queue_len,0}, {reductions,2794}, {trap_exit,true}]}, {<11993.62.0>, [{registered_name,dist_manager}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fced48 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) dist_manager\ny(3) {state,false,\"127.0.0.1\"}\ny(4) dist_manager\ny(5) <0.61.0>\n\n0x00fced64 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.61.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,132}, {trap_exit,false}]}, {<11993.63.0>, [{registered_name,ns_cluster}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x045569c8 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_cluster\ny(3) {state}\ny(4) ns_cluster\ny(5) <0.61.0>\n\n0x045569e4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,32}]}, {heap_size,6765}, {total_heap_size,8362}, {links,[<11993.61.0>]}, {memory,33884}, {message_queue_len,0}, {reductions,13731}, {trap_exit,false}]}, {<11993.64.0>, [{registered_name,ns_config_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0303e988 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,ns_config_sup},rest_for_one,[{child,undefined,ns_config_log,{ns_config_log,start_link,[]},transient,10,worker,[]},{child,undefined,ns_config_isasl_sync,{ns_config_isasl_sync,start_link,[]},transient,10,worker,[]},{child,<0.66.0>,ns_config,{ns_config,start_link,[\"priv/config\",ns_config_default]},permanent,10,worker,[ns_config,ns_config_default]},{child,<0.65.0>,ns_config_events,{gen_event,start_link,[{local,ns_config_events}]},permanent,10,worker,[]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},3,10,[],ns_config_sup,[]}\ny(4) ns_config_sup\ny(5) <0.61.0>\n\n0x0303e9a4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,6765}, {total_heap_size,7142}, {links,[<11993.65.0>,<11993.66.0>,<11993.61.0>]}, {memory,29044}, {message_queue_len,0}, {reductions,1037}, {trap_exit,true}]}, {<11993.65.0>, [{registered_name,ns_config_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00f24db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0309685c Return addr 0x00f3d8a0 (proc_lib:wake_up/3 + 60)\ny(0) false\ny(1) []\ny(2) [{handler,ns_pubsub,#Ref<0.0.0.1925>,{state,#Fun,ok},<0.388.0>},{handler,ns_pubsub,#Ref<0.0.0.1698>,{state,#Fun,undefined},<0.347.0>},{handler,ns_pubsub,#Ref<0.0.0.1538>,{state,#Fun,undefined},<0.310.0>},{handler,ns_port_init,false,{state},false},{handler,menelaus_event,ns_config_events,{state,ns_config_events,[{ip,\"0.0.0.0\"},{port,8091},{approot,\"c:/Program Files/Membase/Server/bin/ns_server/deps/menelaus/priv/public\"},{docroot,\"c:/Program Files/Membase/Server/docs\"}],[{<0.7646.0>,#Ref<0.0.0.150829>},{<0.298.0>,#Ref<0.0.0.1760>}]},false},{handler,ns_node_disco_conf_events,false,{state},false},{handler,menelaus_event,ns_config_events,{state,ns_config_events,[{ip,\"0.0.0.0\"},{port,8091},{approot,\"c:/Program Files/Membase/Server/bin/ns_server/deps/menelaus/priv/public\"},{docroot,\"c:/Program Files/Membase/Server/docs\"}],[]},false},{handler,ns_node_disco_conf_events,false,{state},false},{handler,ns_config_log,false,{state,[{rebalance_status,{none,<<76 bytes>>}}]},false},{handler,ns_config_isasl_sync,false,{state,[{\"default\",[]}],\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\",1,\"_admin\",\"_admin\"},false}]\ny(3) ns_config_events\ny(4) <0.64.0>\n\n0x03096874 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d8b0 (proc_lib:wake_up/3 + 76)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,32}]}, {heap_size,987}, {total_heap_size,2584}, {links, [<11993.310.0>,<11993.347.0>,<11993.388.0>, <11993.64.0>]}, {memory,10912}, {message_queue_len,0}, {reductions,679812}, {trap_exit,true}]}, {<11993.66.0>, [{registered_name,ns_config}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0493fd28 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_config\ny(3) {config,{full,\"priv/config\",undefined,ns_config_default},[[],[{directory,\"c:/Program Files/Membase/Server/config\"},{nodes_wanted,['ns_1@10.2.1.101']},{{node,'ns_1@10.2.1.101',membership},active},{rest,[{port,8091}]},{rest_creds,[{creds,[]}]},{{node,'ns_1@10.2.1.101',isasl},[{path,\"c:/Program 
Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.101',memcached},[{port,11210},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{memory_quota,3268},{buckets,[{configs,[]}]},{moxi,[{port,11211},{verbosity,[]}]},{port_servers,[{moxi,\"./bin/moxi/moxi\",[\"-Z\",{\"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",[port]},\"-z\",{\"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming\",[{rest,port}]},\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",{\"~s\",[{ns_moxi_sup,rest_user,[]}]}},{\"MOXI_SASL_PLAIN_PWD\",{\"~s\",[{ns_moxi_sup,rest_pass,[]}]}}]},use_stdio,stderr_to_stdout,stream]},{memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\",{\"~B\",[port]},\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",{\"admin=~s;default_bucket_name=default;auto_create=false\",[admin_user]},{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",{\"~s\",[{isasl,path}]}},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]}]},{{node,'ns_1@10.2.1.101',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{alerts,[{email,[]},{email_alerts,false},{email_server,[{user,undefined},{pass,undefined},{addr,undefined},{port,undefined},{encrypt,false}]},{alerts,[server_down,server_unresponsive,server_up,server_joined,server_left,bucket_created,bucket_deleted,bucket_auth_failed]}]},{replication,[{enabled,true}]}]],[[{{node,'ns_1@10.2.1.101',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.102',memcached},[{port,11210},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{otp,[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{cookie,pmqchiglstnppkwf}]},{memory_quota,3268},{{node,'ns_1@10.2.1.102',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.100',membership},active},{rebalance_status,{none,<<76 
bytes>>}},{{node,'ns_1@10.2.1.101',membership},active},{rest_creds,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{creds,[{\"Administrator\",[{password,\"j4958ph\"}]}]}]},{buckets,[{'_vclock',[{'ns_1@10.2.1.100',{9,63461309965}}]},{configs,[{\"default\",[{num_replicas,1},{ram_quota,3426746368},{auth_type,sasl},{sasl_password,[]},{type,membase},{num_vbuckets,1024},{ht_size,3079},{tap_keepalive,0},{tap_noop_interval,20},{max_txn_size,1000},{ht_locks,5},{servers,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{map,[['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefin
ed],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101',
'ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['
ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_
1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1
@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10
.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],
['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101
'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100'
,'ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],[
'ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101']]}]}]}]},{port_servers,[{moxi,\"./bin/moxi/moxi\",[\"-Z\",{\"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",[port]},\"-z\",{\"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming\",[{rest,port}]},\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",{\"~s\",[{ns_moxi_sup,rest_user,[]}]}},{\"MOXI_SASL_PLAIN_PWD\",{\"~s\",[{ns_moxi_sup,rest_pass,[]}]}}]},use_stdio,stderr_to_stdout,stream]},{memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\"
,{\"~B\",[port]},\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",{\"admin=~s;default_bucket_name=default;auto_create=false\",[admin_user]},{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",{\"~s\",[{isasl,path}]}},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]}]},{alerts,[{email,[]},{email_alerts,false},{email_server,[{user,undefined},{pass,undefined},{addr,undefined},{port,undefined},{encrypt,false}]},{alerts,[server_down,server_unresponsive,server_up,server_joined,server_left,bucket_created,bucket_deleted,bucket_auth_failed]}]},{nodes_wanted,[{'_vclock',[{'ns_1@10.2.1.100',{2,63461308289}}]},'ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{rest,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{port,8091}]},{{node,'ns_1@10.2.1.102',membership},active},{{node,'ns_1@10.2.1.100',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.101',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.102',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{moxi,[{port,11211},{verbosity,[]}]},{replication,[{enabled,true}]},{{node,'ns_1@10.2.1.100',memcached},[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307259}}]},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.101',memcached},[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.100',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]}]],ns_config_default}\ny(4) ns_config\ny(5) <0.64.0>\n\n0x0493fd44 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,75025}, {total_heap_size,150050}, {links,[<11993.64.0>]}, {memory,600636}, {message_queue_len,0}, {reductions,693453}, {trap_exit,false}]}, {<11993.129.0>, [{registered_name,disk_log_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d5a310 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,disk_log_sup},simple_one_for_one,[{child,undefined,disk_log,{disk_log,istart_link,[]},temporary,1000,worker,[disk_log]}],{dict,1,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[[<0.333.0>,<0.130.0>]],[],[],[],[],[],[],[],[],[]}}},4,3600,[],disk_log_sup,[]}\ny(4) disk_log_sup\ny(5) <0.34.0>\n\n0x00d5a32c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,23}]}, {heap_size,1597}, {total_heap_size,2207}, {links,[<11993.333.0>,<11993.34.0>]}, 
{memory,9284}, {message_queue_len,0}, {reductions,7154}, {trap_exit,true}]}, {<11993.130.0>, [{registered_name,disk_log_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d48e08 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) disk_log_server\ny(3) {state,[]}\ny(4) disk_log_server\ny(5) <0.34.0>\n\n0x00d48e24 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,40}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<11993.333.0>,<11993.34.0>]}, {memory,10792}, {message_queue_len,0}, {reductions,8557}, {trap_exit,true}]}, {<11993.134.0>, [{registered_name,dets_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0541ffd0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,dets_sup},simple_one_for_one,[{child,undefined,dets,{dets,istart_link,[]},temporary,30000,worker,[dets]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},4,3600,[],dets_sup,[]}\ny(4) dets_sup\ny(5) <0.34.0>\n\n0x0541ffec Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,7}]}, {heap_size,377}, {total_heap_size,987}, {links,[<11993.34.0>]}, {memory,4384}, {message_queue_len,0}, {reductions,720}, {trap_exit,true}]}, {<11993.135.0>, [{registered_name,dets}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0593fb5c Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) dets_server\ny(3) {state,40995,[<0.34.0>],[]}\ny(4) dets\ny(5) <0.34.0>\n\n0x0593fb78 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,10}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<11993.34.0>]}, {memory,6824}, {message_queue_len,0}, {reductions,1171}, {trap_exit,true}]}, {<11993.237.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0325fdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03021ca4 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) 
{state,<0.238.0>,{appl_data,inets,[inets_sup,httpc_manager],undefined,{inets_app,[]},[inets,inets_sup,inets_app,inets_service,inets_regexp,ftp,ftp_progress,ftp_response,ftp_sup,http,httpc_handler,httpc_handler_sup,httpc_manager,httpc_profile_sup,httpc_request,httpc_response,httpc_sup,http_cookie,http_uri,http_chunk,http_request,http_response,http_transport,http_util,httpd,httpd_acceptor,httpd_acceptor_sup,httpd_cgi,httpd_conf,httpd_esi,httpd_example,httpd_file,httpd_instance_sup,httpd_log,httpd_manager,httpd_misc_sup,httpd_request,httpd_request_handler,httpd_response,httpd_script_env,httpd_socket,httpd_sup,httpd_util,mod_actions,mod_alias,mod_auth,mod_auth_dets,mod_auth_mnesia,mod_auth_plain,mod_auth_server,mod_browser,mod_cgi,mod_dir,mod_disk_log,mod_esi,mod_get,mod_head,mod_htaccess,mod_include,mod_log,mod_range,mod_responsecontrol,mod_security,mod_security_server,mod_trace,tftp,tftp_binary,tftp_engine,tftp_file,tftp_lib,tftp_logger,tftp_sup],[],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x03021cb4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<11993.7.0>,<11993.238.0>]}, {memory,1964}, {message_queue_len,0}, {reductions,42}, {trap_exit,true}]}, {<11993.238.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032610fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f7e7f8 Return addr 0x00aec194 ()\ny(0) []\ny(1) inets_app\ny(2) <0.239.0>\ny(3) <0.237.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.237.0>,<11993.239.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,42}, {trap_exit,true}]}, {<11993.239.0>, [{registered_name,inets_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0304ab48 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,inets_sup},one_for_one,[{child,<0.246.0>,tftp_sup,{tftp_sup,start_link,[[]]},permanent,infinity,supervisor,[tftp_sup]},{child,<0.245.0>,httpd_sup,{httpd_sup,start_link,[[]]},permanent,infinity,supervisor,[httpd_sup]},{child,<0.241.0>,httpc_sup,{httpc_sup,start_link,[[{httpc,{default,only_session_cookies}}]]},permanent,infinity,supervisor,[httpc_sup]},{child,<0.240.0>,ftp_sup,{ftp_sup,start_link,[]},permanent,infinity,supervisor,[ftp_sup]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,3600,[],inets_sup,[]}\ny(4) inets_sup\ny(5) <0.238.0>\n\n0x0304ab64 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,4}]}, {heap_size,377}, {total_heap_size,754}, {links, [<11993.240.0>,<11993.245.0>,<11993.246.0>, <11993.241.0>,<11993.238.0>]}, {memory,3532}, {message_queue_len,0}, {reductions,320}, {trap_exit,true}]}, {<11993.240.0>, [{registered_name,ftp_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030fe2a8 Return addr 0x00f3d830 
(proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ftp_sup},simple_one_for_one,[{child,undefined,undefined,{ftp,start_link,[]},temporary,4000,worker,[ftp]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,3600,[],ftp_sup,[]}\ny(4) ftp_sup\ny(5) <0.239.0>\n\n0x030fe2c4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.239.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,54}, {trap_exit,true}]}, {<11993.241.0>, [{registered_name,httpc_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f71130 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,httpc_sup},one_for_one,[{child,<0.244.0>,httpc_handler_sup,{httpc_handler_sup,start_link,[]},permanent,infinity,supervisor,[httpc_handler_sup]},{child,<0.242.0>,httpc_profile_sup,{httpc_profile_sup,start_link,[[{httpc,{default,only_session_cookies}}]]},permanent,infinity,supervisor,[httpc_profile_sup]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,3600,[],httpc_sup,[[{httpc,{default,only_session_cookies}}]]}\ny(4) httpc_sup\ny(5) <0.239.0>\n\n0x00f7114c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<11993.242.0>,<11993.244.0>,<11993.239.0>]}, {memory,2916}, {message_queue_len,0}, {reductions,175}, {trap_exit,true}]}, {<11993.242.0>, [{registered_name,httpc_profile_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f7be10 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,httpc_profile_sup},one_for_one,[{child,<0.243.0>,httpc_manager,{httpc_manager,start_link,[{default,only_session_cookies}]},permanent,4000,worker,[httpc_manager]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,3600,[],httpc_profile_sup,[[{httpc,{default,only_session_cookies}}]]}\ny(4) httpc_profile_sup\ny(5) <0.241.0>\n\n0x00f7be2c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.241.0>,<11993.243.0>]}, {memory,1388}, {message_queue_len,0}, {reductions,124}, {trap_exit,true}]}, {<11993.243.0>, [{registered_name,httpc_manager}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03069e58 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) httpc_manager\ny(3) {state,[],49188,{undefined,53285},httpc_manager_session_db,httpc_manager,{options,{undefined,[]},0,2,5,120000,2,disabled,false,inet,default,default}}\ny(4) httpc_manager\ny(5) 
<0.242.0>\n\n0x03069e74 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,987}, {total_heap_size,987}, {links,[<11993.242.0>]}, {memory,4384}, {message_queue_len,0}, {reductions,150}, {trap_exit,true}]}, {<11993.244.0>, [{registered_name,httpc_handler_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f72ca8 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,httpc_handler_sup},simple_one_for_one,[{child,undefined,undefined,{httpc_handler,start_link,[]},temporary,4000,worker,[httpc_handler]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,3600,[],httpc_handler_sup,[]}\ny(4) httpc_handler_sup\ny(5) <0.241.0>\n\n0x00f72cc4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,987}, {total_heap_size,987}, {links,[<11993.241.0>]}, {memory,4384}, {message_queue_len,0}, {reductions,178}, {trap_exit,true}]}, {<11993.245.0>, [{registered_name,httpd_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030ff3c8 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,httpd_sup},one_for_one,[],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,3600,[],httpd_sup,[[]]}\ny(4) httpd_sup\ny(5) <0.239.0>\n\n0x030ff3e4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.239.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,45}, {trap_exit,true}]}, {<11993.246.0>, [{registered_name,tftp_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03085970 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,tftp_sup},one_for_one,[],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,3600,[],tftp_sup,[[]]}\ny(4) tftp_sup\ny(5) <0.239.0>\n\n0x0308598c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.239.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,46}, {trap_exit,true}]}, {<11993.256.0>, [{registered_name,ns_server_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030f1b0c Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,ns_server_sup},one_for_one,[{child,<0.389.0>,ns_tick,{ns_tick,start_link,[]},permanent,10,worker,[ns_tick]},{child,<0.388.0>,ns_moxi_sup,{ns_moxi_sup,start_link,[]},permanent,infinity,supervisor,[ns_moxi_sup]},{child,<0.347.0>,ns_bad_bucket_sup,{ns_bucket_sup,start_link,[ns_bad_bucket_sup,#Fun,ns_bad_bucket_worker]},permanent,infinity,supervisor,[ns_bucket_sup]},{child,<0.346.0>,ns_bad_bucket_worker,{work_queue,start_link,[ns_bad_bucket_worker]},permanent,10,worker,[work_queue]},{child,<0.314.0>,ns_mnesia,{ns_mnesia,start_link,[]},permanent,10000,worker,[ns_mnesia]},{child,<0.313.0>,ns_orchestrator,{ns_orchestrator,start_link,[]},permanent,20,worker,[ns_orchestrator]},{child,<0.310.0>,ns_good_bucket_sup,{ns_bucket_sup,start_link,[ns_good_bucket_sup,#Fun,ns_good_bucket_worker]},permanent,infinity,supervisor,[ns_bucket_sup]},{child,<0.309.0>,ns_good_bucket_worker,{work_queue,start_link,[ns_good_bucket_worker]},permanent,10,worker,[work_queue]},{child,<0.308.0>,ns_stats_event,{gen_event,start_link,[{local,ns_stats_event}]},permanent,10,worker,dynamic},{child,<0.307.0>,ns_tick_event,{gen_event,start_link,[{local,ns_tick_event}]},permanent,10,worker,dynamic},{child,<0.301.0>,ns_port_sup,{ns_port_sup,start_link,[]},permanent,10,worker,[ns_port_sup]},{child,<0.296.0>,menelaus,{menelaus_app,start_subapp,[]},permanent,infinity,supervisor,[menelaus_app]},{child,<0.280.0>,ns_doctor,{ns_doctor,start_link,[]},permanent,10,worker,[ns_doctor]},{child,<0.279.0>,ns_heart,{ns_heart,start_link,[]},permanent,10,worker,[ns_heart]},{child,<0.262.0>,ns_node_disco_sup,{ns_node_disco_sup,start_link,[]},permanent,infinity,supervisor,[ns_node_disco_sup]},{child,<0.259.0>,ns_mail_sup,{ns_mail_sup,start_link,[]},permanent,infinity,supervisor,[ns_mail_sup]},{child,<0.258.0>,ns_log_events,{gen_event,start_link,[{local,ns_log_events}]},permanent,10,worker,dynamic},{child,<0.257.0>,ns_log,{ns_log,start_link,[]},permanent,10,worker,[ns_log]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_server_sup,[]}\ny(4) ns_server_sup\ny(5) <0.61.0>\n\n0x030f1b28 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,46368}, {total_heap_size,121393}, {links, [<11993.296.0>,<11993.309.0>,<11993.346.0>, <11993.388.0>,<11993.389.0>,<11993.347.0>, <11993.313.0>,<11993.314.0>,<11993.310.0>, <11993.307.0>,<11993.308.0>,<11993.301.0>, <11993.259.0>,<11993.279.0>,<11993.280.0>, <11993.262.0>,<11993.257.0>,<11993.258.0>, <11993.61.0>]}, {memory,486368}, {message_queue_len,0}, {reductions,55933}, {trap_exit,true}]}, {<11993.257.0>, [{registered_name,ns_log}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bf6c28 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_log\ny(3) {state,[{log_entry,{1294,88057,838003},'ns_1@10.2.1.101',ns_node_disco,3,\"Initial otp cookie generated: ~p\",[pdofzwfyczwsowjp],info},{log_entry,{1294,88057,885002},'ns_1@10.2.1.101',menelaus_app,1,\"Membase Server has started on web port ~p on node ~p.\",[8091,'ns_1@10.2.1.101'],info},{log_entry,{1294,88108,136403},'ns_1@10.2.1.100',ns_node_disco,4,\"Node ~p saw that node ~p came 
up.\",['ns_1@10.2.1.100','ns_1@10.2.1.101'],info},{log_entry,{1294,88108,383002},'ns_1@10.2.1.101',menelaus_app,1,\"Membase Server has started on web port ~p on node ~p.\",[8091,'ns_1@10.2.1.101'],info},{log_entry,{1294,88108,695024},'ns_1@10.2.1.101',ns_cluster,3,\"Node ~s joined cluster\",['ns_1@10.2.1.101'],info},{log_entry,{1294,88115,374400},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n\",[['ns_1@10.2.1.100','ns_1@10.2.1.101'],[]],info},{log_entry,{1294,88121,458400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[wait_for_memcached_failed],info},{log_entry,{1294,88151,83402},'ns_1@10.2.1.100',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.100',1],info},{log_entry,{1294,88198,567003},'ns_1@10.2.1.101',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.101',1],info},{log_entry,{1294,88206,713403},'ns_1@10.2.1.100',ns_orchestrator,1,\"Rebalance completed successfully.~n\",[],info},{log_entry,{1294,88468,373401},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n (repeated ~p times)\",[['ns_1@10.2.1.100','ns_1@10.2.1.101'],[],1],info},{log_entry,{1294,89089,585005},'ns_1@10.2.1.102',ns_node_disco,4,\"Node ~p saw that node ~p came up.\",['ns_1@10.2.1.102','ns_1@10.2.1.101'],info},{log_entry,{1294,89089,773024},'ns_1@10.2.1.102',ns_cluster,3,\"Node ~s joined cluster\",['ns_1@10.2.1.102'],info},{log_entry,{1294,89089,788403},'ns_1@10.2.1.100',ns_node_disco,4,\"Node ~p saw that node ~p came up.\",['ns_1@10.2.1.100','ns_1@10.2.1.102'],info},{log_entry,{1294,89090,304213},'ns_1@10.2.1.101',ns_node_disco,4,\"Node ~p saw that node ~p came up.\",['ns_1@10.2.1.101','ns_1@10.2.1.102'],info},{log_entry,{1294,89092,596400},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[]],info},{log_entry,{1294,89098,696400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[wait_for_memcached_failed],info},{log_entry,{1294,89176,806003},'ns_1@10.2.1.102',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.102',1],info},{log_entry,{1294,89428,372401},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n (repeated ~p times)\",[wait_for_memcached_failed,2],info},{log_entry,{1294,89428,372401},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n (repeated ~p times)\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[],3],info},{log_entry,{1294,89842,961400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[stopped],info},{log_entry,{1294,89861,634400},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[]],info},{log_entry,{1294,90758,702400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[stopped],info},{log_entry,{1294,90765,67400},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[]],info},{log_entry,{1294,90778,796210},'ns_1@10.2.1.101',ns_memcached,4,\"Control connection to memcached on ~p disconnected: 
~p\",['ns_1@10.2.1.101',{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]}],info},{log_entry,{1294,90778,858400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[{{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},{gen_server,call,[{'ns_memcached-default','ns_1@10.2.1.101'},{delete_vbucket,633},30000]}}],info},{log_entry,{1294,90781,885212},'ns_1@10.2.1.101',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.101',0],info},{log_entry,{1294,90813,53400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[{{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},{gen_server,call,[{'ns_memcached-default','ns_1@10.2.1.101'},{delete_vbucket,65},30000]}}],info},{log_entry,{1294,90835,18400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[{{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},{gen_server,call,[{'ns_memcached-default','ns_1@10.2.1.101'},{delete_vbucket,70},30000]}}],info},{log_entry,{1294,91107,927211},'ns_1@10.2.1.101',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds. (repeated ~p times)\",[\"default\",'ns_1@10.2.1.101',0,9],info},{log_entry,{1294,91107,927211},'ns_1@10.2.1.101',ns_memcached,4,\"Control connection to memcached on ~p disconnected: ~p (repeated ~p times)\",['ns_1@10.2.1.101',{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},9],info},{log_entry,{1294,91108,378401},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n (repeated ~p times)\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[],2],info},{log_entry,{1294,91120,672210},'ns_1@10.2.1.101',ns_memcached,4,\"Control connection to memcached on ~p disconnected: ~p\",['ns_1@10.2.1.101',{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]}],info},{log_entry,{1294,91120,813213},'ns_1@10.2.1.101',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.101',0],info}],{dict,2,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[[{ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.101',0]}|{5,{1294,91120,813213},{1294,91279,934224}}]],[],[],[],[],[],[[{ns_memcached,4,\"Control connection to memcached on ~p disconnected: ~p\",['ns_1@10.2.1.101',{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]}]}|{5,{1294,91120,672210},{1294,91279,934210}}]],[],[],[],[],[],[],[]}}},undefined,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}\ny(4) ns_log\ny(5) <0.256.0>\n\n0x04bf6c44 Return addr 0x00aec194 
()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,21}]}, {heap_size,4181}, {total_heap_size,21892}, {links,[<11993.57.0>,<11993.256.0>]}, {memory,88024}, {message_queue_len,0}, {reductions,69567}, {trap_exit,false}]}, {<11993.258.0>, [{registered_name,ns_log_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00aec190 (unknown function)\nCP: 0x00aec194 ()\narity = 3\n proc_lib\n wake_up\n [gen_event,wake_hib,[<0.256.0>,ns_log_events,[{handler,ns_mail_log,false,{state},false}],[]]]\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,16}]}, {heap_size,34}, {total_heap_size,34}, {links,[<11993.256.0>]}, {memory,572}, {message_queue_len,0}, {reductions,10081}, {trap_exit,true}]}, {<11993.259.0>, [{registered_name,ns_mail_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030ff778 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_mail_sup},one_for_all,[{child,undefined,ns_mail_log,{ns_mail_log,start_link,[]},transient,10,worker,[ns_mail_log]},{child,<0.260.0>,ns_mail,{ns_mail,start_link,[]},permanent,10,worker,[ns_mail]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_mail_sup,[]}\ny(4) ns_mail_sup\ny(5) <0.256.0>\n\n0x030ff794 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<11993.260.0>,<11993.256.0>]}, {memory,2896}, {message_queue_len,0}, {reductions,664}, {trap_exit,true}]}, {<11993.260.0>, [{registered_name,ns_mail}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f703e0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_mail\ny(3) empty_state\ny(4) ns_mail\ny(5) <0.259.0>\n\n0x00f703fc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.259.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,27}, {trap_exit,true}]}, {<11993.262.0>, [{registered_name,ns_node_disco_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fb9bb8 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,ns_node_disco_sup},rest_for_one,[{child,<0.273.0>,ns_config_rep,{ns_config_rep,start_link,[]},permanent,10,worker,[ns_config_rep]},{child,undefined,ns_node_disco_conf_events,{ns_node_disco_conf_events,start_link,[]},transient,10,worker,[]},{child,undefined,ns_node_disco_log,{ns_node_disco_log,start_link,[]},transient,10,worker,[]},{child,<0.264.0>,ns_node_disco,{ns_node_disco,start_link,[]},permanent,10,worker,[]},{child,<0.263.0>,ns_node_disco_events,{gen_event,start_link,[{local,ns_node_disco_events}]},permanent,10,worker,[]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_node_disco_sup,[]}\ny(4) ns_node_disco_sup\ny(5) <0.256.0>\n\n0x00fb9bd4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,4}]}, {heap_size,233}, {total_heap_size,610}, {links, [<11993.264.0>,<11993.273.0>,<11993.263.0>, <11993.256.0>]}, {memory,2936}, {message_queue_len,0}, {reductions,805}, {trap_exit,true}]}, {<11993.263.0>, [{registered_name,ns_node_disco_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00f24db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0304a55c Return addr 0x00f3d8a0 (proc_lib:wake_up/3 + 60)\ny(0) false\ny(1) []\ny(2) [{handler,menelaus_event,ns_node_disco_events,{state,ns_node_disco_events,undefined,[{<0.7646.0>,#Ref<0.0.0.150831>},{<0.298.0>,#Ref<0.0.0.1762>}]},false},{handler,ns_node_disco_rep_events,false,{state},false},{handler,ns_node_disco_log,false,{state},false}]\ny(3) ns_node_disco_events\ny(4) <0.262.0>\n\n0x0304a574 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d8b0 (proc_lib:wake_up/3 + 76)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,615}]}, {heap_size,233}, {total_heap_size,377}, {links,[<11993.262.0>]}, {memory,2024}, {message_queue_len,0}, {reductions,26883}, {trap_exit,true}]}, {<11993.264.0>, [{registered_name,ns_node_disco}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04f3a91c Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_node_disco\ny(3) {state,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],{interval,#Ref<0.0.0.1322>}}\ny(4) ns_node_disco\ny(5) <0.262.0>\n\n0x04f3a938 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1920}]}, {heap_size,46368}, {total_heap_size,53133}, {links,[<11993.262.0>,<11993.57.0>]}, {memory,212988}, {message_queue_len,0}, {reductions,2488249}, {trap_exit,false}]}, {<11993.267.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{inet_tcp_dist,do_setup,6}}, {backtrace, <<"Program counter: 0x044784b4 (dist_util:con_loop/9 + 72)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0563ffa8 Return addr 0x00aec194 ()\ny(0) []\ny(1) #Fun\ny(2) #Fun\ny(3) {tick,46189,44693,0,4}\ny(4) normal\ny(5) 'ns_1@10.2.1.101'\ny(6) {net_address,{{10,2,1,100},21100},\"10.2.1.100\",tcp,inet}\ny(7) #Port<0.4125>\ny(8) 'ns_1@10.2.1.100'\ny(9) <0.21.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,42}]}, {heap_size,233}, {total_heap_size,610}, {links,[<11993.21.0>,#Port<11993.4125>]}, 
{memory,2836}, {message_queue_len,0}, {reductions,5507}, {trap_exit,false}]}, {<11993.273.0>, [{registered_name,ns_config_rep}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0586d494 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_config_rep\ny(3) {state}\ny(4) ns_config_rep\ny(5) <0.262.0>\n\n0x0586d4b0 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,39}]}, {heap_size,46368}, {total_heap_size,75025}, {links,[<11993.262.0>]}, {memory,300536}, {message_queue_len,0}, {reductions,60746}, {trap_exit,false}]}, {<11993.279.0>, [{registered_name,ns_heart}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0306c6d0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_heart\ny(3) [{replication,[{\"default\",5.000000e-001}]},{system_memory_data,[{total_memory,4284698624},{free_memory,87543808},{system_total_memory,4284698624}]},{statistics,[{wall_clock,{3231003,108}},{context_switches,{572644,0}},{garbage_collection,{128412,1248365785,0}},{io,{{input,79663444},{output,38598035}}},{reductions,{396981157,606760}},{run_queue,0},{runtime,{31059,78}}]}]\ny(4) ns_heart\ny(5) <0.256.0>\n\n0x0306c6ec Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,21}]}, {heap_size,1597}, {total_heap_size,4181}, {links,[<11993.256.0>,<11993.57.0>]}, {memory,17180}, {message_queue_len,0}, {reductions,5893857}, {trap_exit,false}]}, {<11993.280.0>, [{registered_name,ns_doctor}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03065480 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_doctor\ny(3) 
{state,{dict,3,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[['ns_1@10.2.1.100',{last_heard,{1294,91290,339210}},{active_buckets,[\"default\"]},{memory,[{total,23738784},{processes,14589724},{processes_used,14571156},{system,9149060},{atom,561277},{atom_used,559430},{binary,874888},{code,4593788},{ets,1699076}]},{cluster_compatibility_version,1},{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{inets,\"5.2\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{system_arch,\"windows\"},{wall_clock,3302},{memory_data,{4284698624,4193091584,{<11585.299.0>,6656796}}},{disk_data,[{\"C:\\\",48162864,60},{\"D:\\\",51279476,0},{\"G:\\\",34724465,17}]},{replication,[{\"default\",5.000000e-001}]},{system_memory_data,[{total_memory,4284698624},{free_memory,94519296},{system_total_memory,4284698624}]},{statistics,[{wall_clock,{3300395,0}},{context_switches,{971035,0}},{garbage_collection,{173040,1932250783,0}},{io,{{input,102457972},{output,50761058}}},{reductions,{798323319,5972627}},{run_queue,0},{runtime,{46503,436}}]}]],[['ns_1@10.2.1.101',{last_heard,{1294,91290,355211}},{active_buckets,[\"default\"]},{memory,[{total,19121560},{processes,10863980},{processes_used,10854844},{system,8257580},{atom,560301},{atom_used,557450},{binary,112504},{code,4562805},{ets,1608132}]},{cluster_compatibility_version,1},{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{inets,\"5.2\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{system_arch,\"windows\"},{wall_clock,3232},{memory_data,{4284698624,4210270208,{<0.387.0>,5385512}}},{disk_data,[{\"C:\\\",46243100,46},{\"D:\\\",51809624,0},{\"G:\\\",33929248,18}]},{replication,[{\"default\",5.000000e-001}]},{system_memory_data,[{total_memory,4284698624},{free_memory,87543808},{system_total_memory,4284698624}]},{statistics,[{wall_clock,{3231003,108}},{context_switches,{572644,0}},{garbage_collection,{128412,1248365785,0}},{io,{{input,79663444},{output,38598035}}},{reductions,{396981157,606760}},{run_queue,0},{runtime,{31059,78}}]}]],[['ns_1@10.2.1.102',{last_heard,{1294,91290,105210}},{active_buckets,[\"default\"]},{memory,[{total,16886920},{processes,8795140},{processes_used,8782172},{system,8091780},{atom,541565},{atom_used,529955},{binary,567976},{code,4290459},{ets,1279148}]},{cluster_compatibility_version,1},{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{system_arch,\"windows\"},{wall_clock,2220},{memory_data,{4284698624,3351322624,{<10870.307.0>,4114268}}},{disk_data,[{\"C:\\\",49423972,41},{\"D:\\\",52797620,0},{\"G:\\\",34724465,17}]},{replication,[{\"default\",0.000000e+000}]},{system_memory_data,[{total_memory,4284698624},{free_memory,933093376},{system_total_memory,4284698624}]},{statistics,[{wall_clock,{2210316,0}},{context_switches,{223166,0}},{garbage_collection,{63145,320518908,0}},{io,{{input,23485359},{output,21305805}}},{reductions,{129761022,609670}},{run_queue,0},{runtime,{12058,46}}]}]],[],[],[],[],[],[],[],[],[],[],[],[],[]}}}}\ny(4) ns_doctor\ny(5) <0.256.0>\n\n0x0306549c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1065}]}, {heap_size,6765}, {total_heap_size,9349}, {links,[<11993.256.0>,<11993.57.0>]}, {memory,37852}, {message_queue_len,0}, {reductions,557146}, {trap_exit,false}]}, 
{<11993.296.0>, [{registered_name,menelaus_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d40cf0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,menelaus_sup},one_for_one,[{child,<0.8890.0>,hot_keys_keeper,{hot_keys_keeper,start_link,[]},permanent,5000,worker,dynamic},{child,undefined,menelaus_event,{menelaus_event,start_link,[]},transient,5000,worker,dynamic},{child,<0.297.0>,menelaus_web,{menelaus_web,start_link,[]},permanent,5000,worker,dynamic}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,10,[{1294,91279,934216}],menelaus_sup,[]}\ny(4) menelaus_sup\ny(5) <0.256.0>\n\n0x00d40d0c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,4181}, {total_heap_size,32838}, {links,[<11993.297.0>,<11993.8890.0>,<11993.256.0>]}, {memory,131828}, {message_queue_len,0}, {reductions,5636}, {trap_exit,true}]}, {<11993.297.0>, [{registered_name,menelaus_web}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bf75bc Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mochiweb_socket_server\ny(3) {mochiweb_socket_server,8091,#Fun,{local,menelaus_web},2046,{0,0,0,0},#Port<0.4131>,<0.8877.0>,128}\ny(4) menelaus_web\ny(5) <0.296.0>\n\n0x04bf75d8 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,12}]}, {heap_size,610}, {total_heap_size,1220}, {links, [<11993.298.0>,<11993.7646.0>,<11993.8877.0>, <11993.296.0>,#Port<11993.4131>]}, {memory,5396}, {message_queue_len,0}, {reductions,2411}, {trap_exit,true}]}, {<11993.298.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x04458f08 (menelaus_web:handle_streaming/4 + 196)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0496d188 Return addr 0x044572f8 (menelaus_web:loop/3 + 12136)\ny(0) {struct,[{buckets,[{struct,[{name,<<7 bytes>>},{nodeLocator,vbucket},{saslPassword,<<0 bytes>>},{nodes,[{struct,[{replication,5.000000e-001},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]},{struct,[{replication,0.000000e+000},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]},{struct,[{replication,0.000000e+000},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]}]},{vBucketServerMap,{struct,[{hashAlgorithm,<<3 bytes>>},{numReplicas,1},{serverList,[<<16 bytes>>,<<16 bytes>>,<<16 
bytes>>]},{vBucketMap,[[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2
,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1]]}]}}]}]}]}\ny(1) {struct,[{buckets,[{struct,[{name,<<7 bytes>>},{nodeLocator,vbucket},{saslPassword,<<0 bytes>>},{nodes,[{struct,[{replication,5.000000e-001},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]},{struct,[{replication,0.000000e+000},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]},{struct,[{replication,0.000000e+000},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 
bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]}]},{vBucketServerMap,{struct,[{hashAlgorithm,<<3 bytes>>},{numReplicas,1},{serverList,[<<16 bytes>>,<<16 bytes>>,<<16 bytes>>]},{vBucketMap,[[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],
[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1]]}]}}]}]}]}\ny(2) {mochiweb_response,{mochiweb_request,#Port<0.4276>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},{3,{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept\",{'Accept',\"*/*\"},nil,nil},{\"host\",{'Host',\"127.0.0.1:8091\"},nil,nil}}}},200,{6,{\"pragma\",{\"Pragma\",\"no-cache\"},{\"cache-control\",{\"Cache-Control\",\"no-cache no-store max-age=0\"},nil,{\"content-type\",{\"Content-Type\",\"application/json; charset=utf-8\"},nil,{\"date\",{\"Date\",\"Mon, 03 Jan 2011 20:55:08 GMT\"},nil,nil}}},{\"server\",{\"Server\",\"Membase Server 
1.6.5r\"},nil,{\"transfer-encoding\",{\"Transfer-Encoding\",\"chunked\"},nil,nil}}}}}\ny(3) {mochiweb_request,#Port<0.4276>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},{3,{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept\",{'Accept',\"*/*\"},nil,nil},{\"host\",{'Host',\"127.0.0.1:8091\"},nil,nil}}}}\ny(4) #Fun\n\n0x0496d1a0 Return addr 0x04463be8 (mochiweb_http:headers/5 + 680)\ny(0) []\ny(1) []\ny(2) []\ny(3) []\ny(4) {mochiweb_request,#Port<0.4276>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},{3,{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept\",{'Accept',\"*/*\"},nil,nil},{\"host\",{'Host',\"127.0.0.1:8091\"},nil,nil}}}}\ny(5) Catch 0x04457308 (menelaus_web:loop/3 + 12152)\n\n0x0496d1bc Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) #Fun\ny(1) []\ny(2) []\ny(3) {mochiweb_request,#Port<0.4276>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},{3,{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept\",{'Accept',\"*/*\"},nil,nil},{\"host\",{'Host',\"127.0.0.1:8091\"},nil,nil}}}}\n\n0x0496d1d0 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,116}]}, {heap_size,46368}, {total_heap_size,242786}, {links,[<11993.297.0>,#Port<11993.4276>]}, {memory,971680}, {message_queue_len,0}, {reductions,13659074}, {trap_exit,false}]}, {<11993.301.0>, [{registered_name,ns_port_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04c268ec Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_port_sup},one_for_one,[{child,<0.305.0>,{memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\",\"11210\",\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",\"admin=_admin;default_bucket_name=default;auto_create=false\",[]],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]},{supervisor_cushion,start_link,[memcached,5000,ns_port_server,start_link,[memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\",\"11210\",\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",\"admin=_admin;default_bucket_name=default;auto_create=false\",[]],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",\"c:/Program 
Files/Membase/Server/data/ns_1/isasl.pw\"},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]]]},permanent,10,worker,[ns_port_server]},{child,<0.303.0>,{moxi,\"./bin/moxi/moxi\",[\"-Z\",\"port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",\"-z\",\"url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming\",\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",[]],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",\"Administrator\"},{\"MOXI_SASL_PLAIN_PWD\",\"j4958ph\"}]},use_stdio,stderr_to_stdout,stream]},{supervisor_cushion,start_link,[moxi,5000,ns_port_server,start_link,[moxi,\"./bin/moxi/moxi\",[\"-Z\",\"port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",\"-z\",\"url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming\",\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",[]],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",\"Administrator\"},{\"MOXI_SASL_PLAIN_PWD\",\"j4958ph\"}]},use_stdio,stderr_to_stdout,stream]]]},permanent,10,worker,[ns_port_server]},{child,undefined,ns_port_init,{ns_port_init,start_link,[]},transient,10,worker,[]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_port_sup,[]}\ny(4) ns_port_sup\ny(5) <0.256.0>\n\n0x04c26908 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,46368}, {total_heap_size,75025}, {links,[<11993.303.0>,<11993.305.0>,<11993.256.0>]}, {memory,300576}, {message_queue_len,0}, {reductions,7580}, {trap_exit,true}]}, {<11993.303.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0305eac0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor_cushion\ny(3) {state,moxi,5000,{1294,88108,383003},<0.304.0>}\ny(4) <0.303.0>\ny(5) <0.301.0>\n\n0x0305eadc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,1597}, {total_heap_size,1597}, {links,[<11993.301.0>,<11993.304.0>]}, {memory,6844}, {message_queue_len,0}, {reductions,149}, {trap_exit,true}]}, {<11993.304.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x043ab9f4 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_port_server\ny(3) {state,#Port<0.4132>,moxi,{[\"2011-01-03 12:55:08: (cproxy_config.c.325) env: MOXI_SASL_PLAIN_PWD (7)\",\"2011-01-03 12:55:08: (cproxy_config.c.316) env: MOXI_SASL_PLAIN_USR (13)\"],[empty]},undefined,[],0}\ny(4) <0.304.0>\ny(5) <0.303.0>\n\n0x043aba10 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, 
[{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,2584}, {total_heap_size,5168}, {links,[<11993.303.0>,#Port<11993.4132>]}, {memory,21128}, {message_queue_len,0}, {reductions,305}, {trap_exit,true}]}, {<11993.305.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030a0da0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor_cushion\ny(3) {state,memcached,5000,{1294,88108,383004},<0.306.0>}\ny(4) <0.305.0>\ny(5) <0.301.0>\n\n0x030a0dbc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,987}, {total_heap_size,987}, {links,[<11993.301.0>,<11993.306.0>]}, {memory,4404}, {message_queue_len,0}, {reductions,55}, {trap_exit,true}]}, {<11993.306.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0307793c Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_port_server\ny(3) {state,#Port<0.4133>,memcached,{[\"Suspend eq_tapq:anon_170 for 1.00 secs\",\"Suspend eq_tapq:anon_169 for 1.00 secs\"],[\"Suspend eq_tapq:anon_75 for 1.00 secs\"]},undefined,[],0}\ny(4) <0.306.0>\ny(5) <0.305.0>\n\n0x03077958 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,63}]}, {heap_size,2584}, {total_heap_size,4181}, {links,[<11993.305.0>,#Port<11993.4133>]}, {memory,17180}, {message_queue_len,0}, {reductions,37579}, {trap_exit,true}]}, {<11993.307.0>, [{registered_name,ns_tick_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00f24db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03090e18 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,ns_pubsub,#Ref<0.0.0.1701>,{state,#Fun,ignored},<0.348.0>}]\ny(3) ns_tick_event\ny(4) <0.256.0>\n\n0x03090e30 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,169}]}, {heap_size,610}, {total_heap_size,987}, {links,[<11993.256.0>,<11993.348.0>]}, {memory,4404}, {message_queue_len,0}, {reductions,35369}, {trap_exit,true}]}, {<11993.308.0>, [{registered_name,ns_stats_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00f24db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bf1b6c Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,ns_pubsub,#Ref<0.0.0.1923>,{state,#Fun,ignored},<0.349.0>}]\ny(3) ns_stats_event\ny(4) <0.256.0>\n\n0x04bf1b84 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1064}]}, {heap_size,1597}, {total_heap_size,1974}, {links,[<11993.349.0>,<11993.256.0>]}, {memory,8352}, {message_queue_len,0}, {reductions,123102}, {trap_exit,true}]}, {<11993.309.0>, [{registered_name,ns_good_bucket_worker}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program 
counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00f8bc60 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) work_queue\ny(3) []\ny(4) ns_good_bucket_worker\ny(5) <0.256.0>\n\n0x00f8bc7c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,5}]}, {heap_size,233}, {total_heap_size,610}, {links,[<11993.256.0>]}, {memory,2876}, {message_queue_len,0}, {reductions,239}, {trap_exit,false}]}, {<11993.310.0>, [{registered_name,ns_good_bucket_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04620ea8 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_good_bucket_sup},one_for_one,[{child,<0.8889.0>,{ns_memcached,\"default\"},{ns_memcached,start_link,[\"default\"]},permanent,86400000,worker,[ns_memcached]},{child,<0.311.0>,{ns_vbm_sup,\"default\"},{ns_vbm_sup,start_link,[\"default\"]},permanent,1000,worker,[ns_vbm_sup]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},3,10,[{1294,91279,934211}],ns_bucket_sup,{ns_good_bucket_sup,#Fun,ns_good_bucket_worker}}\ny(4) ns_good_bucket_sup\ny(5) <0.256.0>\n\n0x04620ec4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,17711}, {total_heap_size,17711}, {links, [<11993.256.0>,<11993.311.0>,<11993.8889.0>, <11993.65.0>]}, {memory,71340}, {message_queue_len,0}, {reductions,2376}, {trap_exit,true}]}, {<11993.311.0>, [{registered_name,'ns_vbm_sup-default'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04f728b0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,'ns_vbm_sup-default'},one_for_one,[{child,<0.8353.0>,{child_id,[511,510,509,508,507,506,505,504,503,502,501,500,499,498,497,496,495,494,493,492,491,490,489,488,487,486,485,484,483,482,481,480,479,478,477,476,475,474,473,472,471,470,469,468,467,466,465,464,463,462,461,460,459,458,457,456,455,454,453,452,451,450,449,448,447,446,445,444,443,442,441,440,439,438,437,436,435,434,433,432,431,430,429,428,427,426,425,424,423,422,421,420,419,418,417,416,415,414,413,412,411,410,409,408,407,406,405,404,403,402,401,400,399,398,397,396,395,394,393,392,391,390,389,388,387,386,385,384,383,382,381,380,379,378,377,376,375,374,373,372,371,370,369,368,367,366,365,364,363,362,361,360,359,358,357,356,355,354,353,352,351,350,349,348,347,346,345,344,343,342,341,340,339,338,337,336,335,334,333,332,331,330,329,328,327,326,325,324,323,322,321,320,319,318,317,316,315,314,313,312,311,310,309,308,307,306,305,304,303,302,301,300,299,298,297,296,295,294,293,292,291,290,289,288,287,286,285,284,283,282,281,280,279,278,277,276,275,274,273,272,271,270,269,268,267,266,265,264,263,262,261,260,259,258,257,256,255,254,253,252,251,250,249,248,247,246,245,244,243,242,241,240,239,238,237,236,235,234,233,232,231,230,229,228,227,226,225,224,223,222,221,220,219,218,217,216,215,214,213,212,211,210,209,208,207,206,205,204,203,202,201,200,199,198,197,196,195,194,193,192,191,190,189,188,187,186,185,184,183,182,181,180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165,164,163],'ns_1@10.2.1.100'},{ns_port_server,start_link,[vbucketmigrator,\"./bin/vbucketmigrator/vbucketmigrator\",[\"-e\",\"-a\",\"default\",\"-h\",\"10.2.1.101:11210\",\"-d\",\"10.2.1.100:11210\",\"-A\",\"-v\",\"-b\",\"511\",\"-b\",\"510\",\"-b\",\"509\",\"-b\",\"508\",\"-b\",\"507\",\"-b\",\"506\",\"-b\",\"505\",\"-b\",\"504\",\"-b\",\"503\",\"-b\",\"502\",\"-b\",\"501\",\"-b\",\"500\",\"-b\",\"499\",\"-b\",\"498\",\"-b\",\"497\",\"-b\",\"496\",\"-b\",\"495\",\"-b\",\"494\",\"-b\",\"493\",\"-b\",\"492\",\"-b\",\"491\",\"-b\",\"490\",\"-b\",\"489\",\"-b\",\"488\",\"-b\",\"487\",\"-b\",\"486\",\"-b\",\"485\",\"-b\",\"484\",\"-b\",\"483\",\"-b\",\"482\",\"-b\",\"481\",\"-b\",\"480\",\"-b\",\"479\",\"-b\",\"478\",\"-b\",\"477\",\"-b\",\"476\",\"-b\",\"475\",\"-b\",\"474\",\"-b\",\"473\",\"-b\",\"472\",\"-b\",\"471\",\"-b\",\"470\",\"-b\",\"469\",\"-b\",\"468\",\"-b\",\"467\",\"-b\",\"466\",\"-b\",\"465\",\"-b\",\"464\",\"-b\",\"463\",\"-b\",\"462\",\"-b\",\"461\",\"-b\",\"460\",\"-b\",\"459\",\"-b\",\"458\",\"-b\",\"457\",\"-b\",\"456\",\"-b\",\"455\",\"-b\",\"454\",\"-b\",\"453\",\"-b\",\"452\",\"-b\",\"451\",\"-b\",\"450\",\"-b\",\"449\",\"-b\",\"448\",\"-b\",\"447\",\"-b\",\"446\",\"-b\",\"445\",\"-b\",\"444\",\"-b\",\"443\",\"-b\",\"442\",\"-b\",\"441\",\"-b\",\"440\",\"-b\",\"439\",\"-b\",\"438\",\"-b\",\"437\",\"-b\",\"436\",\"-b\",\"435\",\"-b\",\"434\",\"-b\",\"433\",\"-b\",\"432\",\"-b\",\"431\",\"-b\",\"430\",\"-b\",\"429\",\"-b\",\"428\",\"-b\",\"427\",\"-b\",\"426\",\"-b\",\"425\",\"-b\",\"424\",\"-b\",\"423\",\"-b\",\"422\",\"-b\",\"421\",\"-b\",\"420\",\"-b\",\"419\",\"-b\",\"418\",\"-b\",\"417\",\"-b\",\"416\",\"-b\",\"415\",\"-b\",\"414\",\"-b\",\"413\",\"-b\",\"412\",\"-b\",\"411\",\"-b\",\"410\",\"-b\",\"409\",\"-b\",\"408\",\"-b\",\"407\",\"-b\",\"406\",\"-b\",\"405\",\"-b\",\"404\",\"-b\",\"403\",\"-b\",\"402\",\"-b\",\"401\",\"-b\",\"400\",\"-b\",\"399\",\"-b\",\"398\",\"-b\",\"397\",\"-b\",\"396\",\"-b\",\"395\",\"-b\",\"394\",\"-b\",\"393\",\"-b\",\"392\",\"-b\",\"391\",\"-b\",\"390\",\"-b\",\"389\",\"-b\",\"388\",\"-b\",\"387\",\"-
b\",\"386\",\"-b\",\"385\",\"-b\",\"384\",\"-b\",\"383\",\"-b\",\"382\",\"-b\",\"381\",\"-b\",\"380\",\"-b\",\"379\",\"-b\",\"378\",\"-b\",\"377\",\"-b\",\"376\",\"-b\",\"375\",\"-b\",\"374\",\"-b\",\"373\",\"-b\",\"372\",\"-b\",\"371\",\"-b\",\"370\",\"-b\",\"369\",\"-b\",\"368\",\"-b\",\"367\",\"-b\",\"366\",\"-b\",\"365\",\"-b\",\"364\",\"-b\",\"363\",\"-b\",\"362\",\"-b\",\"361\",\"-b\",\"360\",\"-b\",\"359\",\"-b\",\"358\",\"-b\",\"357\",\"-b\",\"356\",\"-b\",\"355\",\"-b\",\"354\",\"-b\",\"353\",\"-b\",\"352\",\"-b\",\"351\",\"-b\",\"350\",\"-b\",\"349\",\"-b\",\"348\",\"-b\",\"347\",\"-b\",\"346\",\"-b\",\"345\",\"-b\",\"344\",\"-b\",\"343\",\"-b\",\"342\",\"-b\",\"341\",\"-b\",\"340\",\"-b\",\"339\",\"-b\",\"338\",\"-b\",\"337\",\"-b\",\"336\",\"-b\",\"335\",\"-b\",\"334\",\"-b\",\"333\",\"-b\",\"332\",\"-b\",\"331\",\"-b\",\"330\",\"-b\",\"329\",\"-b\",\"328\",\"-b\",\"327\",\"-b\",\"326\",\"-b\",\"325\",\"-b\",\"324\",\"-b\",\"323\",\"-b\",\"322\",\"-b\",\"321\",\"-b\",\"320\",\"-b\",\"319\",\"-b\",\"318\",\"-b\",\"317\",\"-b\",\"316\",\"-b\",\"315\",\"-b\",\"314\",\"-b\",\"313\",\"-b\",\"312\",\"-b\",\"311\",\"-b\",\"310\",\"-b\",\"309\",\"-b\",\"308\",\"-b\",\"307\",\"-b\",\"306\",\"-b\",\"305\",\"-b\",\"304\",\"-b\",\"303\",\"-b\",\"302\",\"-b\",\"301\",\"-b\",\"300\",\"-b\",\"299\",\"-b\",\"298\",\"-b\",\"297\",\"-b\",\"296\",\"-b\",\"295\",\"-b\",\"294\",\"-b\",\"293\",\"-b\",\"292\",\"-b\",\"291\",\"-b\",\"290\",\"-b\",\"289\",\"-b\",\"288\",\"-b\",\"287\",\"-b\",\"286\",\"-b\",\"285\",\"-b\",\"284\",\"-b\",\"283\",\"-b\",\"282\",\"-b\",\"281\",\"-b\",\"280\",\"-b\",\"279\",\"-b\",\"278\",\"-b\",\"277\",\"-b\",\"276\",\"-b\",\"275\",\"-b\",\"274\",\"-b\",\"273\",\"-b\",\"272\",\"-b\",\"271\",\"-b\",\"270\",\"-b\",\"269\",\"-b\",\"268\",\"-b\",\"267\",\"-b\",\"266\",\"-b\",\"265\",\"-b\",\"264\",\"-b\",\"263\",\"-b\",\"262\",\"-b\",\"261\",\"-b\",\"260\",\"-b\",\"259\",\"-b\",\"258\",\"-b\",\"257\",\"-b\",\"256\",\"-b\",\"255\",\"-b\",\"254\",\"-b\",\"253\",\"-b\",\"252\",\"-b\",\"251\",\"-b\",\"250\",\"-b\",\"249\",\"-b\",\"248\",\"-b\",\"247\",\"-b\",\"246\",\"-b\",\"245\",\"-b\",\"244\",\"-b\",\"243\",\"-b\",\"242\",\"-b\",\"241\",\"-b\",\"240\",\"-b\",\"239\",\"-b\",\"238\",\"-b\",\"237\",\"-b\",\"236\",\"-b\",\"235\",\"-b\",\"234\",\"-b\",\"233\",\"-b\",\"232\",\"-b\",\"231\",\"-b\",\"230\",\"-b\",\"229\",\"-b\",\"228\",\"-b\",\"227\",\"-b\",\"226\",\"-b\",\"225\",\"-b\",\"224\",\"-b\",\"223\",\"-b\",\"222\",\"-b\",\"221\",\"-b\",\"220\",\"-b\",\"219\",\"-b\",\"218\",\"-b\",\"217\",\"-b\",\"216\",\"-b\",\"215\",\"-b\",\"214\",\"-b\",\"213\",\"-b\",\"212\",\"-b\",\"211\",\"-b\",\"210\",\"-b\",\"209\",\"-b\",\"208\",\"-b\",\"207\",\"-b\",\"206\",\"-b\",\"205\",\"-b\",\"204\",\"-b\",\"203\",\"-b\",\"202\",\"-b\",\"201\",\"-b\",\"200\",\"-b\",\"199\",\"-b\",\"198\",\"-b\",\"197\",\"-b\",\"196\",\"-b\",\"195\",\"-b\",\"194\",\"-b\",\"193\",\"-b\",\"192\",\"-b\",\"191\",\"-b\",\"190\",\"-b\",\"189\",\"-b\",\"188\",\"-b\",\"187\",\"-b\",\"186\",\"-b\",\"185\",\"-b\",\"184\",\"-b\",\"183\",\"-b\",\"182\",\"-b\",\"181\",\"-b\",\"180\",\"-b\",\"179\",\"-b\",\"178\",\"-b\",\"177\",\"-b\",\"176\",\"-b\",\"175\",\"-b\",\"174\",\"-b\",\"173\",\"-b\",\"172\",\"-b\",\"171\",\"-b\",\"170\",\"-b\",\"169\",\"-b\",\"168\",\"-b\",\"167\",\"-b\",\"166\",\"-b\",\"165\",\"-b\",\"164\",\"-b\",\"163\"],[use_stdio,stderr_to_stdout,{write_data,[[],\"\\n\"]}]]},permanent,10,worker,[ns_port_server]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],
[],[],[],[],[],[],[]}}},20,10,[],ns_vbm_sup,[]}\ny(4) 'ns_vbm_sup-default'\ny(5) <0.310.0>\n\n0x04f728cc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,28657}, {total_heap_size,75025}, {links,[<11993.310.0>,<11993.8353.0>]}, {memory,300556}, {message_queue_len,0}, {reductions,19241}, {trap_exit,true}]}, {<11993.313.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03f1c57c (misc:wait_for_process/2 + 104)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fcd8a0 Return addr 0x03f1f300 (misc:'-start_singleton/4-fun-0-'/2 + 56)\ny(0) []\ny(1) #Ref<0.0.0.1543>\ny(2) infinity\n\n0x00fcd8b0 Return addr 0x00aec194 ()\ny(0) <11585.110.0>\ny(1) ns_orchestrator\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.256.0>]}, {memory,1348}, {message_queue_len,0}, {reductions,6}, {trap_exit,false}]}, {<11993.314.0>, [{registered_name,ns_mnesia}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030750d0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_mnesia\ny(3) {state}\ny(4) ns_mnesia\ny(5) <0.256.0>\n\n0x030750ec Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<11993.323.0>,<11993.256.0>]}, {memory,10792}, {message_queue_len,0}, {reductions,1477}, {trap_exit,true}]}, {<11993.315.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03f1f748 (misc:'-wait_for_process/2-fun-0-'/3 + 112)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fc65c4 Return addr 0x00aec194 ()\ny(0) []\ny(1) <0.313.0>\ny(2) #Ref<0.0.0.1543>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.110.0>]}, {memory,1364}, {message_queue_len,0}, {reductions,13}, {trap_exit,true}]}, {<11993.317.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0325fdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0464dd4c Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.318.0>,{appl_data,mnesia,[mnesia_dumper_load_regulator,mnesia_event,mnesia_fallback,mnesia_controller,mnesia_kernel_sup,mnesia_late_loader,mnesia_locker,mnesia_monitor,mnesia_recover,mnesia_substr,mnesia_sup,mnesia_tm],undefined,{mnesia_sup,[]},[mnesia,mnesia_backup,mnesia_bup,mnesia_checkpoint,mnesia_checkpoint_sup,mnesia_controller,mnesia_dumper,mnesia_event,mnesia_frag,mnesia_frag_hash,mnesia_frag_old_hash,mnesia_index,mnesia_kernel_sup,mnesia_late_loader,mnesia_lib,mnesia_loader,mnesia_locker,mnesia_log,mnesia_monitor,mnesia_recover,mnesia_registry,mnesia_schema,mnesia_snmp_hook,mnesia_snmp_sup,mnesia_subscr,mnesia_sup,mnesia_sp,mnesia_text,mnesia_tm],[],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x0464dd5c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, 
[{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<11993.7.0>,<11993.318.0>]}, {memory,6844}, {message_queue_len,0}, {reductions,81}, {trap_exit,true}]}, {<11993.318.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032610fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fcf4b8 Return addr 0x00aec194 ()\ny(0) {normal,[]}\ny(1) mnesia_sup\ny(2) <0.319.0>\ny(3) <0.317.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.317.0>,<11993.319.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,32}, {trap_exit,true}]}, {<11993.319.0>, [{registered_name,mnesia_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fccd40 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,mnesia_sup},one_for_all,[{child,<0.321.0>,mnesia_kernel_sup,{mnesia_kernel_sup,start,[]},permanent,infinity,supervisor,[mnesia_kernel_sup,supervisor]},{child,<0.320.0>,mnesia_event,{mnesia_sup,start_event,[]},permanent,30000,worker,[mnesia_event,gen_event]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,3600,[],mnesia_sup,[[]]}\ny(4) mnesia_sup\ny(5) <0.318.0>\n\n0x00fccd5c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<11993.320.0>,<11993.321.0>,<11993.318.0>]}, {memory,3492}, {message_queue_len,0}, {reductions,198}, {trap_exit,true}]}, {<11993.320.0>, [{registered_name,mnesia_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00f24db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030a26b4 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,mnesia_event,false,{state,[],false,[]},false}]\ny(3) mnesia_event\ny(4) <0.319.0>\n\n0x030a26cc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<11993.319.0>,<11993.323.0>]}, {memory,6844}, {message_queue_len,0}, {reductions,428}, {trap_exit,true}]}, {<11993.321.0>, [{registered_name,mnesia_kernel_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fc3fa8 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,mnesia_kernel_sup},one_for_all,[{child,<0.330.0>,mnesia_late_loader,{mnesia_late_loader,start,[]},permanent,3000,worker,[mnesia_late_loader,mnesia_monitor,proc_lib]},{child,<0.329.0>,mnesia_controller,{mnesia_controller,start,[]},permanent,3000,worker,[mnesia_controller,gen_server]},{child,<0.328.0>,mnesia_snmp_sup,{mnesia_snmp_sup,start,[]},permanent,infinity,supervisor,[mnesia_snmp_sup,supervisor]},{child,<0.327.0>,mnesia_checkpoint_sup,{mnesia_checkpoint_sup,start,[]},permanent,infinity,supervisor,[mnesia_checkpoint_sup,supervisor]},{child,<0.326.0>,mnesia_tm,{mnesia_tm,start,[]},permanent,30000,worker,[mnesia_tm,mnesia_monitor,proc_lib]},{child,<0.325.0>,mnesia_recover,{mnesia_recover,start,[]},permanent,180000,worker,[mnesia_recover,gen_server]},{child,<0.324.0>,mnesia_locker,{mnesia_locker,start,[]},permanent,3000,worker,[mnesia_locker,mnesia_monitor,proc_lib]},{child,<0.323.0>,mnesia_subscr,{mnesia_subscr,start,[]},permanent,3000,worker,[mnesia_subscr,gen_server]},{child,<0.322.0>,mnesia_monitor,{mnesia_monitor,start,[]},permanent,3000,worker,[mnesia_monitor,gen_server]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,86400000,[],mnesia_kernel_sup,[]}\ny(4) mnesia_kernel_sup\ny(5) <0.319.0>\n\n0x00fc3fc4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,7}]}, {heap_size,377}, {total_heap_size,754}, {links, [<11993.324.0>,<11993.328.0>,<11993.329.0>, <11993.330.0>,<11993.326.0>,<11993.327.0>, <11993.325.0>,<11993.322.0>,<11993.323.0>, <11993.319.0>]}, {memory,3632}, {message_queue_len,0}, {reductions,551}, {trap_exit,true}]}, {<11993.322.0>, [{registered_name,mnesia_monitor}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0563fc04 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mnesia_monitor\ny(3) {state,<0.321.0>,[],[],true,[],undefined,[]}\ny(4) mnesia_monitor\ny(5) <0.321.0>\n\n0x0563fc20 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,59}]}, {heap_size,2584}, {total_heap_size,3194}, {links,[<11993.333.0>,<11993.321.0>]}, {memory,13232}, {message_queue_len,0}, {reductions,9210}, {trap_exit,true}]}, {<11993.323.0>, [{registered_name,mnesia_subscr}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fb3798 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mnesia_subscr\ny(3) {state,<0.321.0>,57372}\ny(4) mnesia_subscr\ny(5) <0.321.0>\n\n0x00fb37b4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.320.0>,<11993.321.0>,<11993.314.0>]}, {memory,1408}, {message_queue_len,0}, {reductions,111}, {trap_exit,true}]}, {<11993.324.0>, [{registered_name,mnesia_locker}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x044db9d8 (mnesia_locker:loop/1 + 20)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0563c418 Return addr 
0x044af438 (mnesia_sp:init_proc/4 + 132)\ny(0) []\ny(1) []\ny(2) []\ny(3) []\ny(4) []\ny(5) {state,<0.321.0>}\n\n0x0563c434 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) Catch 0x044af438 (mnesia_sp:init_proc/4 + 132)\ny(1) mnesia_locker\ny(2) []\ny(3) []\ny(4) [<0.321.0>]\n\n0x0563c44c Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,949}]}, {heap_size,1597}, {total_heap_size,1974}, {links,[<11993.321.0>]}, {memory,8332}, {message_queue_len,0}, {reductions,450911}, {trap_exit,true}]}, {<11993.325.0>, [{registered_name,mnesia_recover}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d42be0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mnesia_recover\ny(3) {state,<0.321.0>,undefined,undefined,undefined,0,true,[]}\ny(4) mnesia_recover\ny(5) <0.321.0>\n\n0x00d42bfc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,9}]}, {heap_size,377}, {total_heap_size,754}, {links,[<11993.321.0>,<11993.57.0>]}, {memory,3472}, {message_queue_len,0}, {reductions,6163}, {trap_exit,true}]}, {<11993.326.0>, [{registered_name,mnesia_tm}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x044e3e04 (mnesia_tm:doit_loop/1 + 108)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d3c58c Return addr 0x044af438 (mnesia_sp:init_proc/4 + 132)\ny(0) []\ny(1) []\ny(2) {state,{0,nil},{0,nil},<0.321.0>,[],[],[]}\ny(3) []\ny(4) []\ny(5) <0.321.0>\ny(6) {0,nil}\ny(7) {0,nil}\n\n0x00d3c5b0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) Catch 0x044af438 (mnesia_sp:init_proc/4 + 132)\ny(1) mnesia_tm\ny(2) []\ny(3) []\ny(4) [<0.321.0>]\n\n0x00d3c5c8 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,940}]}, {heap_size,610}, {total_heap_size,987}, {links,[<11993.321.0>]}, {memory,4384}, {message_queue_len,0}, {reductions,1031669}, {trap_exit,true}]}, {<11993.327.0>, [{registered_name,mnesia_checkpoint_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fcbdb0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,mnesia_checkpoint_sup},simple_one_for_one,[{child,undefined,mnesia_checkpoint_sup,{mnesia_checkpoint,start,[]},transient,3000,worker,[mnesia_checkpoint_sup,mnesia_checkpoint,supervisor]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,86400000,[],mnesia_checkpoint_sup,[]}\ny(4) mnesia_checkpoint_sup\ny(5) <0.321.0>\n\n0x00fcbdcc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.321.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,61}, {trap_exit,true}]}, {<11993.328.0>, [{registered_name,mnesia_snmp_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, 
<<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fb2ca0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,mnesia_snmp_sup},simple_one_for_one,[{child,undefined,mnesia_snmp_sup,{mnesia_snmp_hook,start,[]},transient,3000,worker,[mnesia_snmp_sup,mnesia_snmp_hook,supervisor]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,86400000,[],mnesia_snmp_sup,[]}\ny(4) mnesia_snmp_sup\ny(5) <0.321.0>\n\n0x00fb2cbc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.321.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,61}, {trap_exit,true}]}, {<11993.329.0>, [{registered_name,mnesia_controller}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0540f0e4 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mnesia_controller\ny(3) {state,<0.321.0>,true,[],[],{0,nil},[],[],{0,nil},undefined,[],[],{interval,#Ref<0.0.0.1580>},false}\ny(4) mnesia_controller\ny(5) <0.321.0>\n\n0x0540f100 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,5}]}, {heap_size,610}, {total_heap_size,987}, {links,[<11993.321.0>,<11993.57.0>]}, {memory,4404}, {message_queue_len,0}, {reductions,1047}, {trap_exit,true}]}, {<11993.330.0>, [{registered_name,mnesia_late_loader}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0453f47c (mnesia_late_loader:loop/1 + 20)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00fc7e7c Return addr 0x044af438 (mnesia_sp:init_proc/4 + 132)\ny(0) []\ny(1) []\ny(2) {state,<0.321.0>}\n\n0x00fc7e8c Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) Catch 0x044af438 (mnesia_sp:init_proc/4 + 132)\ny(1) mnesia_late_loader\ny(2) []\ny(3) []\ny(4) [<0.321.0>]\n\n0x00fc7ea4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<11993.321.0>]}, {memory,1944}, {message_queue_len,0}, {reductions,178}, {trap_exit,false}]}, {<11993.333.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x047853e8 (disk_log:loop/1 + 84)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bf8ecc Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) {state,[],[],<0.129.0>,<0.130.0>,224,{arg,latest_log,undefined,\"c:/Program Files/Membase/Server/Mnesia.ns_1@10.2.1.101/LATEST.LOG\",true,infinity,halt,false,internal,<0.322.0>,none,read_write,true,[{notify,true},{file,\"c:/Program Files/Membase/Server/Mnesia.ns_1@10.2.1.101/LATEST.LOG\"},{name,latest_log},{repair,true},{mode,read_write}]},ok,ok}\n\n0x04bf8ed4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,40}]}, {heap_size,1597}, {total_heap_size,3194}, {links, [<11993.130.0>,<11993.322.0>,<11993.129.0>, #Port<11993.5860>]}, 
{memory,13272}, {message_queue_len,0}, {reductions,333486}, {trap_exit,true}]}, {<11993.346.0>, [{registered_name,ns_bad_bucket_worker}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0304bac0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) work_queue\ny(3) []\ny(4) ns_bad_bucket_worker\ny(5) <0.256.0>\n\n0x0304badc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<11993.256.0>]}, {memory,6824}, {message_queue_len,0}, {reductions,291}, {trap_exit,false}]}, {<11993.347.0>, [{registered_name,ns_bad_bucket_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bdd4a0 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_bad_bucket_sup},one_for_one,[{child,<0.387.0>,{stats_reader,\"default\"},{stats_reader,start_link,[\"default\"]},permanent,10,worker,[stats_reader]},{child,<0.349.0>,{stats_archiver,\"default\"},{stats_archiver,start_link,[\"default\"]},permanent,10,worker,[stats_archiver]},{child,<0.348.0>,{stats_collector,\"default\"},{stats_collector,start_link,[\"default\"]},permanent,10,worker,[stats_collector]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},3,10,[],ns_bucket_sup,{ns_bad_bucket_sup,#Fun,ns_bad_bucket_worker}}\ny(4) ns_bad_bucket_sup\ny(5) <0.256.0>\n\n0x04bdd4bc Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,17711}, {total_heap_size,17711}, {links, [<11993.256.0>,<11993.349.0>,<11993.387.0>, <11993.348.0>,<11993.65.0>]}, {memory,71360}, {message_queue_len,0}, {reductions,1559}, {trap_exit,true}]}, {<11993.348.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00f2dbb0 (gen:wait_resp_mon/3 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d50d04 Return addr 0x03229274 (gen_server:call/3 + 80)\ny(0) 30000\ny(1) #Ref<0.0.0.150741>\ny(2) 'ns_1@10.2.1.101'\n\n0x00d50d14 Return addr 0x049b722c (stats_collector:handle_info/2 + 164)\ny(0) 30000\ny(1) {stats,<<0 bytes>>}\ny(2) 'ns_memcached-default'\ny(3) Catch 0x03229274 (gen_server:call/3 + 80)\n\n0x00d50d28 Return addr 0x0322b948 (gen_server:handle_msg/5 + 932)\ny(0) {state,\"default\",[6889099676,226289950,0,0,0,0,4651329,0,0,0,0,0,0,0,0,704990,3096257,2149545,792334,0,0,0,622340,942395,0],44,1294091287748}\ny(1) 1294091288747\ny(2) 1294091287748\ny(3) 44\ny(4) [6889099676,226289950,0,0,0,0,4651329,0,0,0,0,0,0,0,0,704990,3096257,2149545,792334,0,0,0,622340,942395,0]\ny(5) \"default\"\ny(6) Catch 0x049b722c (stats_collector:handle_info/2 + 164)\n\n0x00d50d48 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) stats_collector\ny(1) {state,\"default\",[6889099676,226289950,0,0,0,0,4651329,0,0,0,0,0,0,0,0,704990,3096257,2149545,792334,0,0,0,622340,942395,0],44,1294091287748}\ny(2) <0.348.0>\ny(3) <0.347.0>\ny(4) {tick,1294091288747}\ny(5) Catch 0x0322b948 (gen_server:handle_msg/5 + 932)\n\n0x00d50d64 Return addr 
0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,4181}, {total_heap_size,4181}, {links,[<11993.347.0>,<11993.307.0>]}, {memory,17252}, {message_queue_len,2}, {reductions,19775328}, {trap_exit,false}]}, {<11993.349.0>, [{registered_name,'stats_archiver-default'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0455d388 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) stats_archiver\ny(3) {state,\"default\"}\ny(4) 'stats_archiver-default'\ny(5) <0.347.0>\n\n0x0455d3a4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,225}]}, {heap_size,6765}, {total_heap_size,17711}, {links,[<11993.308.0>,<11993.347.0>,<11993.57.0>]}, {memory,71320}, {message_queue_len,0}, {reductions,16943121}, {trap_exit,false}]}, {<11993.387.0>, [{registered_name,'stats_reader-default'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x063cc8b4 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) stats_reader\ny(3) {state,\"default\"}\ny(4) 'stats_reader-default'\ny(5) <0.347.0>\n\n0x063cc8d0 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,11}]}, {heap_size,832040}, {total_heap_size,1346269}, {links,[<11993.347.0>]}, {memory,5385512}, {message_queue_len,0}, {reductions,9117828}, {trap_exit,false}]}, {<11993.388.0>, [{registered_name,ns_moxi_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bee968 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_moxi_sup},one_for_one,[],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_moxi_sup,[]}\ny(4) ns_moxi_sup\ny(5) <0.256.0>\n\n0x04bee984 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,17711}, {total_heap_size,17711}, {links,[<11993.256.0>,<11993.65.0>]}, {memory,71300}, {message_queue_len,0}, {reductions,2259}, {trap_exit,true}]}, {<11993.389.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03f1c57c (misc:wait_for_process/2 + 104)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03078c68 Return addr 0x03f1f300 (misc:'-start_singleton/4-fun-0-'/2 + 56)\ny(0) []\ny(1) #Ref<0.0.0.1929>\ny(2) infinity\n\n0x03078c78 Return addr 0x00aec194 ()\ny(0) <11585.152.0>\ny(1) ns_tick\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<11993.256.0>]}, {memory,1348}, {message_queue_len,0}, {reductions,6}, {trap_exit,false}]}, {<11993.390.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program 
counter: 0x03f1f748 (misc:'-wait_for_process/2-fun-0-'/3 + 112)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x045612dc Return addr 0x00aec194 ()\ny(0) []\ny(1) <0.389.0>\ny(2) #Ref<0.0.0.1929>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.152.0>]}, {memory,1364}, {message_queue_len,0}, {reductions,13}, {trap_exit,true}]}, {<11993.3488.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{inet_tcp_dist,do_setup,6}}, {backtrace, <<"Program counter: 0x044784b4 (dist_util:con_loop/9 + 72)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d4c5c4 Return addr 0x00aec194 ()\ny(0) []\ny(1) #Fun\ny(2) #Fun\ny(3) {tick,6364,10365,3,3}\ny(4) normal\ny(5) 'ns_1@10.2.1.101'\ny(6) {net_address,{{10,2,1,102},21100},\"10.2.1.102\",tcp,inet}\ny(7) #Port<0.5101>\ny(8) 'ns_1@10.2.1.102'\ny(9) <0.21.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,16}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<11993.21.0>,#Port<11993.5101>]}, {memory,12240}, {message_queue_len,0}, {reductions,4107}, {trap_exit,false}]}, {<11993.7646.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x04457d3c (menelaus_web:handle_pool_info_wait/6 + 140)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04f9fd08 Return addr 0x044572c0 (menelaus_web:loop/3 + 12080)\ny(0) \"29164773\"\ny(1) \"29164773\"\ny(2) 3000\ny(3) \"127.0.0.1\"\ny(4) {\"Administrator\",\"j4958ph\"}\ny(5) \"default\"\ny(6) {mochiweb_request,#Port<0.5749>,'GET',\"/pools/default?waitChange=3000&etag=29164773\",{1,1},{15,{\"host\",{'Host',\"localhost:8091\"},{\"accept\",{'Accept',\"application/json, text/javascript, */*\"},nil,{\"accept-language\",{'Accept-Language',\"en-us,en;q=0.5\"},{\"accept-encoding\",{'Accept-Encoding',\"gzip,deflate\"},{\"accept-charset\",{'Accept-Charset',\"ISO-8859-1,utf-8;q=0.7,*;q=0.7\"},nil,nil},nil},{\"connection\",{'Connection',\"keep-alive\"},{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},nil,{\"cache-control\",{'Cache-Control',\"no-cache\"},nil,nil}},{\"cookie\",{'Cookie',\"auth=QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},nil,nil}}}},{\"user-agent\",{'User-Agent',\"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13\"},{\"keep-alive\",{'Keep-Alive',\"115\"},{\"invalid-auth-response\",{\"invalid-auth-response\",\"on\"},nil,nil},{\"pragma\",{'Pragma',\"no-cache\"},nil,{\"referer\",{'Referer',\"http://localhost:8091/index.html\"},nil,nil}}},{\"x-requested-with\",{\"X-Requested-With\",\"XMLHttpRequest\"},nil,nil}}}}}\n\n0x04f9fd28 Return addr 0x04463be8 (mochiweb_http:headers/5 + 680)\ny(0) []\ny(1) []\ny(2) []\ny(3) []\ny(4) {mochiweb_request,#Port<0.5749>,'GET',\"/pools/default?waitChange=3000&etag=29164773\",{1,1},{15,{\"host\",{'Host',\"localhost:8091\"},{\"accept\",{'Accept',\"application/json, text/javascript, */*\"},nil,{\"accept-language\",{'Accept-Language',\"en-us,en;q=0.5\"},{\"accept-encoding\",{'Accept-Encoding',\"gzip,deflate\"},{\"accept-charset\",{'Accept-Charset',\"ISO-8859-1,utf-8;q=0.7,*;q=0.7\"},nil,nil},nil},{\"connection\",{'Connection',\"keep-alive\"},{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},nil,{\"cache-control\",{'Cache-Control',\"no-cache\"},nil,nil}},{\"cookie\",{'Cookie',\"auth=QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},nil,nil}}}},{\"user-agent\",{'User-Agent',\"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.13) 
Gecko/20101203 Firefox/3.6.13\"},{\"keep-alive\",{'Keep-Alive',\"115\"},{\"invalid-auth-response\",{\"invalid-auth-response\",\"on\"},nil,nil},{\"pragma\",{'Pragma',\"no-cache\"},nil,{\"referer\",{'Referer',\"http://localhost:8091/index.html\"},nil,nil}}},{\"x-requested-with\",{\"X-Requested-With\",\"XMLHttpRequest\"},nil,nil}}}}}\ny(5) Catch 0x04457308 (menelaus_web:loop/3 + 12152)\n\n0x04f9fd44 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) #Fun\ny(1) []\ny(2) []\ny(3) {mochiweb_request,#Port<0.5749>,'GET',\"/pools/default?waitChange=3000&etag=29164773\",{1,1},{15,{\"host\",{'Host',\"localhost:8091\"},{\"accept\",{'Accept',\"application/json, text/javascript, */*\"},nil,{\"accept-language\",{'Accept-Language',\"en-us,en;q=0.5\"},{\"accept-encoding\",{'Accept-Encoding',\"gzip,deflate\"},{\"accept-charset\",{'Accept-Charset',\"ISO-8859-1,utf-8;q=0.7,*;q=0.7\"},nil,nil},nil},{\"connection\",{'Connection',\"keep-alive\"},{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},nil,{\"cache-control\",{'Cache-Control',\"no-cache\"},nil,nil}},{\"cookie\",{'Cookie',\"auth=QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},nil,nil}}}},{\"user-agent\",{'User-Agent',\"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13\"},{\"keep-alive\",{'Keep-Alive',\"115\"},{\"invalid-auth-response\",{\"invalid-auth-response\",\"on\"},nil,nil},{\"pragma\",{'Pragma',\"no-cache\"},nil,{\"referer\",{'Referer',\"http://localhost:8091/index.html\"},nil,nil}}},{\"x-requested-with\",{\"X-Requested-With\",\"XMLHttpRequest\"},nil,nil}}}}}\n\n0x04f9fd58 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,46368}, {total_heap_size,50549}, {links,[<11993.297.0>,#Port<11993.5749>]}, {memory,202732}, {message_queue_len,0}, {reductions,7851284}, {trap_exit,false}]}, {<11993.8353.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04fdcda8 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_port_server\ny(3) {state,#Port<0.5833>,vbucketmigrator,{[\"Authenticated towards: {Sock 10.2.1.101:11210}\",\"Authenticating towards: {Sock 10.2.1.101:11210}\"],[\"Connecting to {Sock 10.2.1.101:11210}\"]},undefined,[],0}\ny(4) <0.8353.0>\ny(5) <0.311.0>\n\n0x04fdcdc4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,6765}, {total_heap_size,6765}, {links,[<11993.311.0>,#Port<11993.5833>]}, {memory,27516}, {message_queue_len,0}, {reductions,243}, {trap_exit,true}]}, {<11993.8877.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00f192d8 (prim_inet:accept0/2 + 92)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d51920 Return addr 0x032c8894 (inet_tcp:accept/1 + 20)\ny(0) 21572\ny(1) #Port<0.4131>\n\n0x00d5192c Return addr 0x0446af1c (mochiweb_socket_server:acceptor_loop/1 + 80)\ny(0) []\n\n0x00d51934 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) #Fun\ny(1) <0.297.0>\ny(2) Catch 0x0446af1c (mochiweb_socket_server:acceptor_loop/1 + 80)\n\n0x00d51944 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, 
{error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<11993.297.0>]}, {memory,1984}, {message_queue_len,0}, {reductions,18}, {trap_exit,false}]}, {<11993.8889.0>, [{registered_name,'ns_memcached-default'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00f19d08 (prim_inet:recv0/3 + 112)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00d55a34 Return addr 0x049a933c (mc_binary:recv/3 + 92)\ny(0) 29797\ny(1) #Port<0.5899>\n\n0x00d55a40 Return addr 0x049b4f9c (mc_client_binary:cmd_binary_vocal_recv/5 + 96)\ny(0) 5000\ny(1) res\ny(2) #Port<0.5899>\n\n0x00d55a50 Return addr 0x049b59dc (mc_client_binary:delete_vbucket/2 + 156)\ny(0) []\ny(1) []\ny(2) undefined\ny(3) undefined\ny(4) undefined\ny(5) #Port<0.5899>\ny(6) 63\n\n0x00d55a70 Return addr 0x0444c410 (ns_memcached:handle_call/3 + 208)\n\n0x00d55a74 Return addr 0x0322b63c (gen_server:handle_msg/5 + 152)\ny(0) {state,\"default\",#Port<0.5899>}\ny(1) 40\ny(2) #Port<0.5899>\n\n0x00d55a84 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) ns_memcached\ny(1) {state,\"default\",#Port<0.5899>}\ny(2) <0.8889.0>\ny(3) <0.310.0>\ny(4) {delete_vbucket,40}\ny(5) {<11585.11992.0>,#Ref<11585.0.0.246373>}\ny(6) Catch 0x0322b63c (gen_server:handle_msg/5 + 152)\n\n0x00d55aa4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,128}]}, {heap_size,4181}, {total_heap_size,32838}, {links,[<11993.57.0>,<11993.310.0>,#Port<11993.5899>]}, {memory,132020}, {message_queue_len,3}, {reductions,251928}, {trap_exit,true}]}, {<11993.8890.0>, [{registered_name,hot_keys_keeper}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03229e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04567c88 Return addr 0x00f3d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) hot_keys_keeper\ny(3) 
{state,[{\"default\",[{\"Assdfdf88545244\",[{ops,3.267974e-003}]},{\"Assdfdf1304300285\",[{ops,3.267974e-003}]},{\"Assdfdf623240301\",[{ops,3.267974e-003}]},{\"Assdfdf796192375\",[{ops,3.267974e-003}]},{\"Assdfdf331631225\",[{ops,3.267974e-003}]},{\"Assdfdf88251782\",[{ops,3.267974e-003}]},{\"Assdfdf17166682\",[{ops,3.267974e-003}]},{\"Assdfdf1876246878\",[{ops,3.267974e-003}]},{\"Assdfdf475099662\",[{ops,3.267974e-003}]},{\"Assdfdf1397088614\",[{ops,3.267974e-003}]}]}],[{\"default\",[{\"Assdfdf475099662\",[{ops,1.628664e-003}]},{\"Assdfdf1196676958\",[{ops,3.257329e-003}]},{\"Assdfdf2059899287\",[{ops,1.628664e-003}]},{\"Assdfdf1009960967\",[{ops,1.628664e-003}]},{\"Assdfdf1876246878\",[{ops,1.628664e-003}]},{\"Assdfdf1630315261\",[{ops,1.628664e-003}]},{\"Assdfdf17166682\",[{ops,1.628664e-003}]},{\"Assdfdf757630399\",[{ops,1.628664e-003}]},{\"Assdfdf1421467409\",[{ops,3.257329e-003}]},{\"Assdfdf33927238\",[{ops,1.628664e-003}]},{\"Assdfdf380705567\",[{ops,1.628664e-003}]},{\"Assdfdf654266129\",[{ops,1.628664e-003}]},{\"Assdfdf409682300\",[{ops,1.628664e-003}]},{\"Assdfdf171735370\",[{ops,1.628664e-003}]},{\"Assdfdf865045733\",[{ops,1.628664e-003}]},{\"Assdfdf1948169090\",[{ops,1.628664e-003}]},{\"Assdfdf1118470971\",[{ops,1.628664e-003}]},{\"Assdfdf1625529305\",[{ops,1.628664e-003}]},{\"Assdfdf447889213\",[{ops,1.628664e-003}]},{\"Assdfdf1619151697\",[{ops,1.628664e-003}]},{\"Assdfdf88251782\",[{ops,1.628664e-003}]},{\"Assdfdf1288059078\",[{ops,3.257329e-003}]},{\"Assdfdf1070859031\",[{ops,1.628664e-003}]},{\"Assdfdf856022456\",[{ops,3.257329e-003}]},{\"Assdfdf1569036847\",[{ops,1.628664e-003}]},{\"Assdfdf222404535\",[{ops,1.628664e-003}]},{\"Assdfdf1480195671\",[{ops,1.628664e-003}]},{\"Assdfdf484918953\",[{ops,1.628664e-003}]},{\"Assdfdf1771703177\",[{ops,1.628664e-003}]},{\"Assdfdf1571608521\",[{ops,1.628664e-003}]},{\"Assdfdf1447771138\",[{ops,1.628664e-003}]},{\"Assdfdf402319505\",[{ops,1.628664e-003}]},{\"Assdfdf331631225\",[{ops,1.628664e-003}]},{\"Assdfdf250717573\",[{ops,3.257329e-003}]},{\"Assdfdf206848791\",[{ops,1.628664e-003}]},{\"Assdfdf1955197775\",[{ops,1.628664e-003}]},{\"Assdfdf796192375\",[{ops,1.628664e-003}]},{\"Assdfdf1269897464\",[{ops,1.628664e-003}]},{\"Assdfdf1978385082\",[{ops,1.628664e-003}]},{\"Assdfdf1529028110\",[{ops,3.257329e-003}]},{\"Assdfdf1399293076\",[{ops,1.628664e-003}]},{\"Assdfdf519476573\",[{ops,1.628664e-003}]},{\"Assdfdf1176328881\",[{ops,1.628664e-003}]},{\"Assdfdf1640362316\",[{ops,3.257329e-003}]},{\"Assdfdf623240301\",[{ops,1.628664e-003}]},{\"Assdfdf820141458\",[{ops,3.257329e-003}]},{\"Assdfdf649173665\",[{ops,1.628664e-003}]},{\"Assdfdf386610799\",[{ops,3.257329e-003}]},{\"Assdfdf883521566\",[{ops,1.628664e-003}]},{\"Assdfdf501805713\",[{ops,1.628664e-003}]},{\"Assdfdf1258722766\",[{ops,1.628664e-003}]},{\"Assdfdf1044122508\",[{ops,1.628664e-003}]},{\"Assdfdf79161977\",[{ops,1.628664e-003}]},{\"Assdfdf1696016621\",[{ops,1.628664e-003}]},{\"Assdfdf457298205\",[{ops,1.628664e-003}]},{\"Assdfdf955039362\",[{ops,1.628664e-003}]},{\"Assdfdf481334689\",[{ops,1.628664e-003}]},{\"Assdfdf1444666503\",[{ops,1.628664e-003}]},{\"Assdfdf1618688479\",[{ops,1.628664e-003}]},{\"Assdfdf1900404596\",[{ops,1.628664e-003}]},{\"Assdfdf1777018795\",[{ops,1.628664e-003}]},{\"Assdfdf1837129101\",[{ops,3.257329e-003}]},{\"Assdfdf1158243624\",[{ops,1.628664e-003}]},{\"Assdfdf141153477\",[{ops,1.628664e-003}]},{\"Assdfdf529733216\",[{ops,1.628664e-003}]},{\"Assdfdf479738390\",[{ops,1.628664e-003}]},{\"Assdfdf1493126454\",[{ops,3.257329e-003}]},{\"Assdfdf1773979199
\",[{ops,3.257329e-003}]},{\"Assdfdf1532828493\",[{ops,1.628664e-003}]},{\"Assdfdf1097166904\",[{ops,1.628664e-003}]},{\"Assdfdf98439836\",[{ops,1.628664e-003}]},{\"Assdfdf239225763\",[{ops,3.257329e-003}]},{\"Assdfdf1960523672\",[{ops,1.628664e-003}]},{\"Assdfdf1517893883\",[{ops,1.628664e-003}]},{\"Assdfdf1212798464\",[{ops,1.628664e-003}]},{\"Assdfdf480142376\",[{ops,1.628664e-003}]},{\"Assdfdf693129496\",[{ops,1.628664e-003}]},{\"Assdfdf1569077312\",[{ops,1.628664e-003}]},{\"Assdfdf1293611520\",[{ops,1.628664e-003}]},{\"Assdfdf375268498\",[{ops,1.628664e-003}]},{\"Assdfdf1304300285\",[{ops,1.628664e-003}]},{\"Assdfdf1166283405\",[{ops,3.257329e-003}]},{\"Assdfdf485784388\",[{ops,1.628664e-003}]},{\"Assdfdf2030971413\",[{ops,1.628664e-003}]},{\"Assdfdf858054640\",[{ops,1.628664e-003}]},{\"Assdfdf235877367\",[{ops,1.628664e-003}]},{\"Assdfdf1088912309\",[{ops,1.628664e-003}]},{\"Assdfdf1109305314\",[{ops,1.628664e-003}]},{\"Assdfdf1159322980\",[{ops,1.628664e-003}]},{\"Assdfdf1414423573\",[{ops,3.257329e-003}]},{\"Assdfdf231664654\",[{ops,1.628664e-003}]},{\"Assdfdf1398710701\",[{ops,1.628664e-003}]},{\"Assdfdf1024492891\",[{ops,1.628664e-003}]},{\"Assdfdf1940350266\",[{ops,1.628664e-003}]},{\"Assdfdf188382281\",[{ops,3.257329e-003}]},{\"Assdfdf88545244\",[{ops,1.628664e-003}]},{\"Assdfdf628208533\",[{ops,1.628664e-003}]},{\"Assdfdf308633157\",[{ops,1.628664e-003}]},{\"Assdfdf1737849680\",[{ops,1.628664e-003}]},{\"Assdfdf832446147\",[{ops,1.628664e-003}]}]}],<0.8911.0>}\ny(4) hot_keys_keeper\ny(5) <0.296.0>\n\n0x04567ca4 Return addr 0x00aec194 ()\ny(0) Catch 0x00f3d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,6765}, {total_heap_size,6765}, {links,[<11993.296.0>,<11993.8911.0>,<11993.57.0>]}, {memory,27536}, {message_queue_len,0}, {reductions,778}, {trap_exit,false}]}, {<11993.8906.0>, [{registered_name,[]}, {status,running}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x00e52cd0 (unknown function)\nCP: 0x04a0222c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 56)\n\n0x053b22c4 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [<0.8910.0>]\ny(1) <0.8906.0>\n\n0x053b22d0 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,hot_keys_keeper},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<5220 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,6765},{total_heap_size,6765},{links,[<0.296.0>,<0.8911.0>,<0.57.0>]},{memory,27536},{message_queue_len,0},{reductions,778},{trap_exit,false}]\ny(1) <0.8890.0>\n\n0x053b22dc Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,'ns_memcached-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1159 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,128}]},{heap_size,4181},{total_heap_size,32838},{links,[<0.57.0>,<0.310.0>,#Port<0.5899>]},{memory,132020},{message_queue_len,3},{reductions,251928},{trap_exit,true}]\ny(1) <0.8889.0>\n\n0x053b22e8 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<608 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<0.297.0>]},{memory,1984},{message_queue_len,0},{reductions,18},{trap_exit,false}]\ny(1) <0.8877.0>\n\n0x053b22f4 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<584 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,6765},{total_heap_size,6765},{links,[<0.311.0>,#Port<0.5833>]},{memory,27516},{message_queue_len,0},{reductions,243},{trap_exit,true}]\ny(1) <0.8353.0>\n\n0x053b2300 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<3850 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,46368},{total_heap_size,50549},{links,[<0.297.0>,#Port<0.5749>]},{memory,202732},{message_queue_len,0},{reductions,7851284},{trap_exit,false}]\ny(1) <0.7646.0>\n\n0x053b230c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{inet_tcp_dist,do_setup,6}},{backtrace,<<452 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,16}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.21.0>,#Port<0.5101>]},{memory,12240},{message_queue_len,0},{reductions,4107},{trap_exit,false}]\ny(1) <0.3488.0>\n\n0x053b2318 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<230 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<11585.152.0>]},{memory,1364},{message_queue_len,0},{reductions,13},{trap_exit,true}]\ny(1) <0.390.0>\n\n0x053b2324 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<334 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.256.0>]},{memory,1348},{message_queue_len,0},{reductions,6},{trap_exit,false}]\ny(1) <0.389.0>\n\n0x053b2330 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_moxi_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<575 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,17711},{total_heap_size,17711},{links,[<0.256.0>,<0.65.0>]},{memory,71300},{message_queue_len,0},{reductions,2259},{trap_exit,true}]\ny(1) <0.388.0>\n\n0x053b233c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,'stats_reader-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<414 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,11}]},{heap_size,832040},{total_heap_size,1346269},{links,[<0.347.0>]},{memory,5385512},{message_queue_len,0},{reductions,9117828},{trap_exit,false}]\ny(1) <0.387.0>\n\n0x053b2348 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) 
[{registered_name,'stats_archiver-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<418 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,225}]},{heap_size,6765},{total_heap_size,17711},{links,[<0.308.0>,<0.347.0>,<0.57.0>]},{memory,71320},{message_queue_len,0},{reductions,16943121},{trap_exit,false}]\ny(1) <0.349.0>\n\n0x053b2354 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1392 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,4181},{total_heap_size,4181},{links,[<0.347.0>,<0.307.0>]},{memory,17252},{message_queue_len,2},{reductions,19775328},{trap_exit,false}]\ny(1) <0.348.0>\n\n0x053b2360 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_bad_bucket_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1019 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,17711},{total_heap_size,17711},{links,[<0.256.0>,<0.349.0>,<0.387.0>,<0.348.0>,<0.65.0>]},{memory,71360},{message_queue_len,0},{reductions,1559},{trap_exit,true}]\ny(1) <0.347.0>\n\n0x053b236c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_bad_bucket_worker},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<395 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,987},{total_heap_size,1597},{links,[<0.256.0>]},{memory,6824},{message_queue_len,0},{reductions,291},{trap_exit,false}]\ny(1) <0.346.0>\n\n0x053b2378 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<635 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,40}]},{heap_size,1597},{total_heap_size,3194},{links,[<0.130.0>,<0.322.0>,<0.129.0>,#Port<0.5860>]},{memory,13272},{message_queue_len,0},{reductions,333486},{trap_exit,true}]\ny(1) <0.333.0>\n\n0x053b2384 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_late_loader},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<536 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<0.321.0>]},{memory,1944},{message_queue_len,0},{reductions,178},{trap_exit,false}]\ny(1) <0.330.0>\n\n0x053b2390 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_controller},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<497 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,5}]},{heap_size,610},{total_heap_size,987},{links,[<0.321.0>,<0.57.0>]},{memory,4404},{message_queue_len,0},{reductions,1047},{trap_exit,true}]\ny(1) <0.329.0>\n\n0x053b239c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_snmp_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<728 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.321.0>]},{memory,1368},{message_queue_len,0},{reductions,61},{trap_exit,true}]\ny(1) <0.328.0>\n\n0x053b23a8 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_checkpoint_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<760 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.321.0>]},{memory,1368},{message_queue_len,0},{reductions,61},{trap_exit,true}]\ny(1) <0.327.0>\n\n0x053b23b4 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_tm},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<626 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,940}]},{heap_size,610},{total_heap_size,987},{links,[<0.321.0>]},{memory,4384},{message_queue_len,0},{reductions,1031669},{trap_exit,true}]\ny(1) <0.326.0>\n\n0x053b23c0 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_recover},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<448 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,9}]},{heap_size,377},{total_heap_size,754},{links,[<0.321.0>,<0.57.0>]},{memory,3472},{message_queue_len,0},{reductions,6163},{trap_exit,true}]\ny(1) <0.325.0>\n\n0x053b23cc Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_locker},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<562 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,949}]},{heap_size,1597},{total_heap_size,1974},{links,[<0.321.0>]},{memory,8332},{message_queue_len,0},{reductions,450911},{trap_exit,true}]\ny(1) <0.324.0>\n\n0x053b23d8 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_subscr},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<412 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.320.0>,<0.321.0>,<0.314.0>]},{memory,1408},{message_queue_len,0},{reductions,111},{trap_exit,true}]\ny(1) <0.323.0>\n\n0x053b23e4 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_monitor},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<435 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,59}]},{heap_size,2584},{total_heap_size,3194},{links,[<0.333.0>,<0.321.0>]},{memory,13232},{message_queue_len,0},{reductions,9210},{trap_exit,true}]\ny(1) <0.322.0>\n\n0x053b23f0 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_kernel_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1660 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,7}]},{heap_size,377},{total_heap_size,754},{links,[<0.324.0>,<0.328.0>,<0.329.0>,<0.330.0>,<0.326.0>,<0.327.0>,<0.325.0>,<0.322.0>,<0.323.0>,<0.319.0>]},{memory,3632},{message_queue_len,0},{reductions,551},{trap_exit,true}]\ny(1) <0.321.0>\n\n0x053b23fc Return addr 0x04a0224c 
(diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_event},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<421 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,987},{total_heap_size,1597},{links,[<0.319.0>,<0.323.0>]},{memory,6844},{message_queue_len,0},{reductions,428},{trap_exit,true}]\ny(1) <0.320.0>\n\n0x053b2408 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<807 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,754},{links,[<0.320.0>,<0.321.0>,<0.318.0>]},{memory,3492},{message_queue_len,0},{reductions,198},{trap_exit,true}]\ny(1) <0.319.0>\n\n0x053b2414 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<244 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.317.0>,<0.319.0>]},{memory,1328},{message_queue_len,0},{reductions,32},{trap_exit,true}]\ny(1) <0.318.0>\n\n0x053b2420 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1045 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,987},{total_heap_size,1597},{links,[<0.7.0>,<0.318.0>]},{memory,6844},{message_queue_len,0},{reductions,81},{trap_exit,true}]\ny(1) <0.317.0>\n\n0x053b242c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<230 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<11585.110.0>]},{memory,1364},{message_queue_len,0},{reductions,13},{trap_exit,true}]\ny(1) <0.315.0>\n\n0x053b2438 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_mnesia},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<388 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,1597},{total_heap_size,2584},{links,[<0.323.0>,<0.256.0>]},{memory,10792},{message_queue_len,0},{reductions,1477},{trap_exit,true}]\ny(1) <0.314.0>\n\n0x053b2444 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<342 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.256.0>]},{memory,1348},{message_queue_len,0},{reductions,6},{trap_exit,false}]\ny(1) <0.313.0>\n\n0x053b2450 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,'ns_vbm_sup-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<6131 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,28657},{total_heap_size,75025},{links,[<0.310.0>,<0.8353.0>]},{memory,300556},{message_queue_len,0},{reductions,19241},{trap_exit,true}]\ny(1) <0.311.0>\n\n0x053b245c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_good_bucket_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<915 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,17711},{total_heap_size,17711},{links,[<0.256.0>,<0.311.0>,<0.8889.0>,<0.65.0>]},{memory,71340},{message_queue_len,0},{reductions,2376},{trap_exit,true}]\ny(1) <0.310.0>\n\n0x053b2468 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_good_bucket_worker},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<396 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,5}]},{heap_size,233},{total_heap_size,610},{links,[<0.256.0>]},{memory,2876},{message_queue_len,0},{reductions,239},{trap_exit,false}]\ny(1) <0.309.0>\n\n0x053b2474 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_stats_event},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<458 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1064}]},{heap_size,1597},{total_heap_size,1974},{links,[<0.349.0>,<0.256.0>]},{memory,8352},{message_queue_len,0},{reductions,123102},{trap_exit,true}]\ny(1) <0.308.0>\n\n0x053b2480 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_tick_event},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<457 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,169}]},{heap_size,610},{total_heap_size,987},{links,[<0.256.0>,<0.348.0>]},{memory,4404},{message_queue_len,0},{reductions,35369},{trap_exit,true}]\ny(1) <0.307.0>\n\n0x053b248c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<560 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,63}]},{heap_size,2584},{total_heap_size,4181},{links,[<0.305.0>,#Port<0.4133>]},{memory,17180},{message_queue_len,0},{reductions,37579},{trap_exit,true}]\ny(1) <0.306.0>\n\n0x053b2498 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<442 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,987},{total_heap_size,987},{links,[<0.301.0>,<0.306.0>]},{memory,4404},{message_queue_len,0},{reductions,55},{trap_exit,true}]\ny(1) <0.305.0>\n\n0x053b24a4 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<588 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,2584},{total_heap_size,5168},{links,[<0.303.0>,#Port<0.4132>]},{memory,21128},{message_queue_len,0},{reductions,305},{trap_exit,true}]\ny(1) <0.304.0>\n\n0x053b24b0 Return addr 0x04a0224c 
(diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<437 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,1597},{total_heap_size,1597},{links,[<0.301.0>,<0.304.0>]},{memory,6844},{message_queue_len,0},{reductions,149},{trap_exit,true}]\ny(1) <0.303.0>\n\n0x053b24bc Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_port_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<2878 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,46368},{total_heap_size,75025},{links,[<0.303.0>,<0.305.0>,<0.256.0>]},{memory,300576},{message_queue_len,0},{reductions,7580},{trap_exit,true}]\ny(1) <0.301.0>\n\n0x053b24c8 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<16473 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,116}]},{heap_size,46368},{total_heap_size,242786},{links,[<0.297.0>,#Port<0.4276>]},{memory,971680},{message_queue_len,0},{reductions,13659074},{trap_exit,false}]\ny(1) <0.298.0>\n\n0x053b24d4 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,menelaus_web},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<523 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,12}]},{heap_size,610},{total_heap_size,1220},{links,[<0.298.0>,<0.7646.0>,<0.8877.0>,<0.296.0>,#Port<0.4131>]},{memory,5396},{message_queue_len,0},{reductions,2411},{trap_exit,true}]\ny(1) <0.297.0>\n\n0x053b24e0 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,menelaus_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<877 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,4181},{total_heap_size,32838},{links,[<0.297.0>,<0.8890.0>,<0.256.0>]},{memory,131828},{message_queue_len,0},{reductions,5636},{trap_exit,true}]\ny(1) <0.296.0>\n\n0x053b24ec Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_doctor},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<3449 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1065}]},{heap_size,6765},{total_heap_size,9349},{links,[<0.256.0>,<0.57.0>]},{memory,37852},{message_queue_len,0},{reductions,557146},{trap_exit,false}]\ny(1) <0.280.0>\n\n0x053b24f8 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_heart},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<751 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,21}]},{heap_size,1597},{total_heap_size,4181},{links,[<0.256.0>,<0.57.0>]},{memory,17180},{message_queue_len,0},{reductions,5893857},{trap_exit,false}]\ny(1) <0.279.0>\n\n0x053b2504 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_config_rep},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<396 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,39}]},{heap_size,46368},{total_heap_size,75025},{links,[<0.262.0>]},{memory,300536},{message_queue_len,0},{reductions,60746},{trap_exit,false}]\ny(1) <0.273.0>\n\n0x053b2510 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{inet_tcp_dist,do_setup,6}},{backtrace,<<453 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,42}]},{heap_size,233},{total_heap_size,610},{links,[<0.21.0>,#Port<0.4125>]},{memory,2836},{message_queue_len,0},{reductions,5507},{trap_exit,false}]\ny(1) <0.267.0>\n\n0x053b251c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_node_disco},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<480 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1920}]},{heap_size,46368},{total_heap_size,53133},{links,[<0.262.0>,<0.57.0>]},{memory,212988},{message_queue_len,0},{reductions,2488249},{trap_exit,false}]\ny(1) <0.264.0>\n\n0x053b2528 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_node_disco_events},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<615 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,615}]},{heap_size,233},{total_heap_size,377},{links,[<0.262.0>]},{memory,2024},{message_queue_len,0},{reductions,26883},{trap_exit,true}]\ny(1) <0.263.0>\n\n0x053b2534 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_node_disco_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1094 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,4}]},{heap_size,233},{total_heap_size,610},{links,[<0.264.0>,<0.273.0>,<0.263.0>,<0.256.0>]},{memory,2936},{message_queue_len,0},{reductions,805},{trap_exit,true}]\ny(1) <0.262.0>\n\n0x053b2540 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_mail},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<388 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.259.0>]},{memory,1368},{message_queue_len,0},{reductions,27},{trap_exit,true}]\ny(1) <0.260.0>\n\n0x053b254c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_mail_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<746 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,233},{total_heap_size,610},{links,[<0.260.0>,<0.256.0>]},{memory,2896},{message_queue_len,0},{reductions,664},{trap_exit,true}]\ny(1) <0.259.0>\n\n0x053b2558 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_log_events},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<223 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,16}]},{heap_size,34},{total_heap_size,34},{links,[<0.256.0>]},{memory,572},{message_queue_len,0},{reductions,10081},{trap_exit,true}]\ny(1) <0.258.0>\n\n0x053b2564 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) 
[{registered_name,ns_log},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<7554 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,21}]},{heap_size,4181},{total_heap_size,21892},{links,[<0.57.0>,<0.256.0>]},{memory,88024},{message_queue_len,0},{reductions,69567},{trap_exit,false}]\ny(1) <0.257.0>\n\n0x053b2570 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_server_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<2546 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,46368},{total_heap_size,121393},{links,[<0.296.0>,<0.309.0>,<0.346.0>,<0.388.0>,<0.389.0>,<0.347.0>,<0.313.0>,<0.314.0>,<0.310.0>,<0.307.0>,<0.308.0>,<0.301.0>,<0.259.0>,<0.279.0>,<0.280.0>,<0.262.0>,<0.257.0>,<0.258.0>,<0.61.0>]},{memory,486368},{message_queue_len,0},{reductions,55933},{trap_exit,true}]\ny(1) <0.256.0>\n\n0x053b257c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,tftp_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<570 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.239.0>]},{memory,1368},{message_queue_len,0},{reductions,46},{trap_exit,true}]\ny(1) <0.246.0>\n\n0x053b2588 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,httpd_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<573 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.239.0>]},{memory,1368},{message_queue_len,0},{reductions,45},{trap_exit,true}]\ny(1) <0.245.0>\n\n0x053b2594 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,httpc_handler_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<696 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,987},{total_heap_size,987},{links,[<0.241.0>]},{memory,4384},{message_queue_len,0},{reductions,178},{trap_exit,true}]\ny(1) <0.244.0>\n\n0x053b25a0 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,httpc_manager},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<538 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,987},{total_heap_size,987},{links,[<0.242.0>]},{memory,4384},{message_queue_len,0},{reductions,150},{trap_exit,true}]\ny(1) <0.243.0>\n\n0x053b25ac Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,httpc_profile_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<764 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.241.0>,<0.243.0>]},{memory,1388},{message_queue_len,0},{reductions,124},{trap_exit,true}]\ny(1) <0.242.0>\n\n0x053b25b8 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,httpc_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<890 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,233},{total_heap_size,610},{links,[<0.242.0>,<0.244.0>,<0.239.0>]},{memory,2916},{message_queue_len,0},{reductions,175},{trap_exit,true}]\ny(1) <0.241.0>\n\n0x053b25c4 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ftp_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<646 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.239.0>]},{memory,1368},{message_queue_len,0},{reductions,54},{trap_exit,true}]\ny(1) <0.240.0>\n\n0x053b25d0 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,inets_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<989 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,4}]},{heap_size,377},{total_heap_size,754},{links,[<0.240.0>,<0.245.0>,<0.246.0>,<0.241.0>,<0.238.0>]},{memory,3532},{message_queue_len,0},{reductions,320},{trap_exit,true}]\ny(1) <0.239.0>\n\n0x053b25dc Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<234 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.237.0>,<0.239.0>]},{memory,1328},{message_queue_len,0},{reductions,42},{trap_exit,true}]\ny(1) <0.238.0>\n\n0x053b25e8 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1330 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<0.7.0>,<0.238.0>]},{memory,1964},{message_queue_len,0},{reductions,42},{trap_exit,true}]\ny(1) <0.237.0>\n\n0x053b25f4 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,dets},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<404 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,10}]},{heap_size,610},{total_heap_size,1597},{links,[<0.34.0>]},{memory,6824},{message_queue_len,0},{reductions,1171},{trap_exit,true}]\ny(1) <0.135.0>\n\n0x053b2600 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,dets_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<647 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,7}]},{heap_size,377},{total_heap_size,987},{links,[<0.34.0>]},{memory,4384},{message_queue_len,0},{reductions,720},{trap_exit,true}]\ny(1) <0.134.0>\n\n0x053b260c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,disk_log_server},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<402 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,40}]},{heap_size,1597},{total_heap_size,2584},{links,[<0.333.0>,<0.34.0>]},{memory,10792},{message_queue_len,0},{reductions,8557},{trap_exit,true}]\ny(1) <0.130.0>\n\n0x053b2618 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) 
[{registered_name,disk_log_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<691 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,23}]},{heap_size,1597},{total_heap_size,2207},{links,[<0.333.0>,<0.34.0>]},{memory,9284},{message_queue_len,0},{reductions,7154},{trap_exit,true}]\ny(1) <0.129.0>\n\n0x053b2624 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_config},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<44579 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,75025},{total_heap_size,150050},{links,[<0.64.0>]},{memory,600636},{message_queue_len,0},{reductions,693453},{trap_exit,false}]\ny(1) <0.66.0>\n\n0x053b2630 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_config_events},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1549 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,32}]},{heap_size,987},{total_heap_size,2584},{links,[<0.310.0>,<0.347.0>,<0.388.0>,<0.64.0>]},{memory,10912},{message_queue_len,0},{reductions,679812},{trap_exit,true}]\ny(1) <0.65.0>\n\n0x053b263c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_config_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1004 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,6765},{total_heap_size,7142},{links,[<0.65.0>,<0.66.0>,<0.61.0>]},{memory,29044},{message_queue_len,0},{reductions,1037},{trap_exit,true}]\ny(1) <0.64.0>\n\n0x053b2648 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_cluster},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<389 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,32}]},{heap_size,6765},{total_heap_size,8362},{links,[<0.61.0>]},{memory,33884},{message_queue_len,0},{reductions,13731},{trap_exit,false}]\ny(1) <0.63.0>\n\n0x053b2654 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,dist_manager},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<411 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.61.0>]},{memory,1368},{message_queue_len,0},{reductions,132},{trap_exit,false}]\ny(1) <0.62.0>\n\n0x053b2660 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_server_cluster_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1185 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,377},{total_heap_size,987},{links,[<0.62.0>,<0.64.0>,<0.256.0>,<0.63.0>,<0.60.0>]},{memory,4464},{message_queue_len,0},{reductions,2794},{trap_exit,true}]\ny(1) <0.61.0>\n\n0x053b266c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<232 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.59.0>,<0.61.0>]},{memory,1328},{message_queue_len,0},{reductions,50},{trap_exit,true}]\ny(1) <0.60.0>\n\n0x053b2678 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<739 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<0.7.0>,<0.60.0>]},{memory,1964},{message_queue_len,0},{reductions,46},{trap_exit,true}]\ny(1) <0.59.0>\n\n0x053b2684 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,timer_server},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<375 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1048}]},{heap_size,1597},{total_heap_size,1974},{links,[<0.279.0>,<0.329.0>,<0.8889.0>,<0.8890.0>,<0.349.0>,<0.280.0>,<0.325.0>,<0.257.0>,<0.264.0>,<0.34.0>]},{memory,8512},{message_queue_len,0},{reductions,298710},{trap_exit,true}]\ny(1) <0.57.0>\n\n0x053b2690 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,memsup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<525 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,19}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.52.0>]},{memory,12280},{message_queue_len,0},{reductions,131412},{trap_exit,true}]\ny(1) <0.55.0>\n\n0x053b269c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,disksup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<473 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,19}]},{heap_size,377},{total_heap_size,2961},{links,[<0.52.0>]},{memory,12280},{message_queue_len,0},{reductions,57292},{trap_exit,true}]\ny(1) <0.54.0>\n\n0x053b26a8 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,os_mon_sysinfo},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<411 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,29}]},{heap_size,1597},{total_heap_size,2207},{links,[<0.52.0>,#Port<0.1438>]},{memory,9284},{message_queue_len,0},{reductions,6665},{trap_exit,true}]\ny(1) <0.53.0>\n\n0x053b26b4 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,os_mon_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<828 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,754},{links,[<0.53.0>,<0.54.0>,<0.55.0>,<0.51.0>]},{memory,3512},{message_queue_len,0},{reductions,274},{trap_exit,true}]\ny(1) <0.52.0>\n\n0x053b26c0 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<229 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.50.0>,<0.52.0>]},{memory,1328},{message_queue_len,0},{reductions,40},{trap_exit,true}]\ny(1) <0.51.0>\n\n0x053b26cc Return addr 0x04a0224c 
(diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<568 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.7.0>,<0.51.0>]},{memory,1388},{message_queue_len,0},{reductions,23},{trap_exit,true}]\ny(1) <0.50.0>\n\n0x053b26d8 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<351 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.32.0>]},{memory,1348},{message_queue_len,0},{reductions,14},{trap_exit,false}]\ny(1) <0.48.0>\n\n0x053b26e4 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<223 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.32.0>]},{memory,1308},{message_queue_len,0},{reductions,8},{trap_exit,false}]\ny(1) <0.47.0>\n\n0x053b26f0 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,release_handler},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<645 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,5}]},{heap_size,610},{total_heap_size,987},{links,[<0.41.0>]},{memory,4384},{message_queue_len,0},{reductions,1249},{trap_exit,false}]\ny(1) <0.45.0>\n\n0x053b26fc Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,overload},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<433 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.42.0>]},{memory,1368},{message_queue_len,0},{reductions,39},{trap_exit,false}]\ny(1) <0.44.0>\n\n0x053b2708 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,alarm_handler},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<438 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.42.0>]},{memory,1368},{message_queue_len,0},{reductions,43},{trap_exit,true}]\ny(1) <0.43.0>\n\n0x053b2714 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,sasl_safe_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<748 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,233},{total_heap_size,610},{links,[<0.43.0>,<0.44.0>,<0.41.0>]},{memory,2916},{message_queue_len,0},{reductions,174},{trap_exit,true}]\ny(1) <0.42.0>\n\n0x053b2720 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,sasl_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<774 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,233},{total_heap_size,610},{links,[<0.42.0>,<0.45.0>,<0.40.0>]},{memory,2916},{message_queue_len,0},{reductions,158},{trap_exit,true}]\ny(1) <0.41.0>\n\n0x053b272c Return addr 0x04a0224c 
(diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<246 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.39.0>,<0.41.0>]},{memory,1328},{message_queue_len,0},{reductions,70},{trap_exit,true}]\ny(1) <0.40.0>\n\n0x053b2738 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<700 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.7.0>,<0.40.0>]},{memory,1388},{message_queue_len,0},{reductions,23},{trap_exit,true}]\ny(1) <0.39.0>\n\n0x053b2744 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,kernel_safe_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1044 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,610},{total_heap_size,987},{links,[<0.130.0>,<0.134.0>,<0.135.0>,<0.57.0>,<0.129.0>,<0.11.0>]},{memory,4484},{message_queue_len,0},{reductions,388},{trap_exit,true}]\ny(1) <0.34.0>\n\n0x053b2750 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<385 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.11.0>]},{memory,1368},{message_queue_len,0},{reductions,268},{trap_exit,true}]\ny(1) <0.33.0>\n\n0x053b275c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<404 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,4}]},{heap_size,2584},{total_heap_size,20295},{links,[<0.47.0>,<0.48.0>,<0.31.0>]},{memory,81656},{message_queue_len,0},{reductions,5170},{trap_exit,true}]\ny(1) <0.32.0>\n\n0x053b2768 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,user},{status,waiting},{initial_call,{user,server,2}},{backtrace,<<728 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,35}]},{heap_size,1597},{total_heap_size,5778},{links,[<0.29.0>,<0.32.0>,#Port<0.830>,<0.6.0>]},{memory,23648},{message_queue_len,0},{reductions,40326},{trap_exit,true}]\ny(1) <0.31.0>\n\n0x053b2774 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<441 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,1597},{total_heap_size,1597},{links,[<0.11.0>,<0.31.0>]},{memory,6844},{message_queue_len,0},{reductions,166},{trap_exit,true}]\ny(1) <0.29.0>\n\n0x053b2780 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,standard_error},{status,waiting},{initial_call,{standard_error,server,2}},{backtrace,<<187 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.27.0>,#Port<0.792>]},{memory,1388},{message_queue_len,0},{reductions,7},{trap_exit,true}]\ny(1) <0.28.0>\n\n0x053b278c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,standard_error_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<464 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.11.0>,<0.28.0>]},{memory,1388},{message_queue_len,0},{reductions,40},{trap_exit,true}]\ny(1) <0.27.0>\n\n0x053b2798 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,code_server},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<2875 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,350}]},{heap_size,4181},{total_heap_size,21892},{links,[<0.11.0>]},{memory,87944},{message_queue_len,0},{reductions,245222},{trap_exit,true}]\ny(1) <0.26.0>\n\n0x053b27a4 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,file_server_2},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<398 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,587}]},{heap_size,1597},{total_heap_size,3194},{links,[#Port<0.496>,<0.11.0>]},{memory,13232},{message_queue_len,0},{reductions,3422635},{trap_exit,true}]\ny(1) <0.25.0>\n\n0x053b27b0 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,global_group},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<456 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.11.0>]},{memory,1368},{message_queue_len,0},{reductions,76},{trap_exit,true}]\ny(1) <0.24.0>\n\n0x053b27bc Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{net_kernel,ticker,2}},{backtrace,<<194 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.21.0>]},{memory,1308},{message_queue_len,0},{reductions,433},{trap_exit,false}]\ny(1) <0.23.0>\n\n0x053b27c8 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{inet_tcp_dist,accept_loop,2}},{backtrace,<<387 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,137}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.21.0>]},{memory,12260},{message_queue_len,0},{reductions,121386},{trap_exit,false}]\ny(1) <0.22.0>\n\n0x053b27d4 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,net_kernel},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<652 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,106}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.23.0>,<0.267.0>,<0.3488.0>,<0.18.0>,<0.22.0>,#Port<0.460>]},{memory,12396},{message_queue_len,0},{reductions,33168},{trap_exit,true}]\ny(1) <0.21.0>\n\n0x053b27e0 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) 
[{registered_name,auth},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<397 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,26}]},{heap_size,377},{total_heap_size,754},{links,[<0.18.0>]},{memory,3452},{message_queue_len,0},{reductions,3702},{trap_exit,true}]\ny(1) <0.20.0>\n\n0x053b27ec Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,erl_epmd},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<409 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.18.0>,#Port<0.473>]},{memory,1388},{message_queue_len,0},{reductions,135},{trap_exit,false}]\ny(1) <0.19.0>\n\n0x053b27f8 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,net_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<870 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,987},{links,[<0.19.0>,<0.20.0>,<0.21.0>,<0.11.0>]},{memory,4444},{message_queue_len,0},{reductions,265},{trap_exit,true}]\ny(1) <0.18.0>\n\n0x053b2804 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,inet_db},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<498 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,37}]},{heap_size,377},{total_heap_size,754},{links,[<0.11.0>]},{memory,3452},{message_queue_len,0},{reductions,3586},{trap_exit,true}]\ny(1) <0.17.0>\n\n0x053b2810 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<176 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,610},{total_heap_size,987},{links,[<0.13.0>]},{memory,4324},{message_queue_len,0},{reductions,249},{trap_exit,false}]\ny(1) <0.16.0>\n\n0x053b281c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<297 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,377},{total_heap_size,754},{links,[<0.13.0>]},{memory,3392},{message_queue_len,0},{reductions,232},{trap_exit,false}]\ny(1) <0.15.0>\n\n0x053b2828 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<339 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,610},{total_heap_size,987},{links,[<0.13.0>]},{memory,4324},{message_queue_len,0},{reductions,312},{trap_exit,true}]\ny(1) <0.14.0>\n\n0x053b2834 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,global_name_server},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<538 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,347}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.14.0>,<0.16.0>,<0.15.0>,<0.11.0>]},{memory,12452},{message_queue_len,0},{reductions,86955},{trap_exit,true}]\ny(1) <0.13.0>\n\n0x053b2840 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 
88)\ny(0) [{registered_name,rex},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<453 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,349}]},{heap_size,987},{total_heap_size,1364},{links,[<0.11.0>]},{memory,5996},{message_queue_len,0},{reductions,63716},{trap_exit,true}]\ny(1) <0.12.0>\n\n0x053b284c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,kernel_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1623 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,4181},{total_heap_size,8362},{links,[<0.26.0>,<0.29.0>,<0.33.0>,<0.34.0>,<0.27.0>,<0.17.0>,<0.24.0>,<0.25.0>,<0.18.0>,<0.12.0>,<0.13.0>,<0.10.0>]},{memory,34104},{message_queue_len,0},{reductions,3131},{trap_exit,true}]\ny(1) <0.11.0>\n\n0x053b2858 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<228 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.9.0>,<0.11.0>]},{memory,1328},{message_queue_len,0},{reductions,72},{trap_exit,true}]\ny(1) <0.10.0>\n\n0x053b2864 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1414 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,754},{links,[<0.7.0>,<0.10.0>]},{memory,3472},{message_queue_len,0},{reductions,44},{trap_exit,true}]\ny(1) <0.9.0>\n\n0x053b2870 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,application_controller},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<566 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,32}]},{heap_size,4181},{total_heap_size,21892},{links,[<0.50.0>,<0.237.0>,<0.317.0>,<0.59.0>,<0.9.0>,<0.39.0>,<0.0.0>]},{memory,88124},{message_queue_len,0},{reductions,43037},{trap_exit,true}]\ny(1) <0.7.0>\n\n0x053b287c Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,error_logger},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<552 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,43}]},{heap_size,2584},{total_heap_size,20295},{links,[<0.0.0>,<0.31.0>,#Port<0.1578>]},{memory,81656},{message_queue_len,0},{reductions,388522},{trap_exit,true}]\ny(1) <0.6.0>\n\n0x053b2888 Return addr 0x04a0224c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,erl_prim_loader},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<620 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,519}]},{heap_size,1597},{total_heap_size,12543},{links,[#Port<0.1>,<0.0.0>]},{memory,50568},{message_queue_len,0},{reductions,1323920},{trap_exit,true}]\ny(1) <0.3.0>\n\n0x053b2894 Return addr 0x04a01434 (diag_handler:do_diag_per_node/0 + 112)\ny(0) [{registered_name,init},{status,waiting},{initial_call,{otp_ring0,start,2}},{backtrace,<<830 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,233}]},{heap_size,1597},{total_heap_size,3194},{links,[<0.6.0>,<0.7.0>,<0.3.0>]},{memory,13192},{message_queue_len,0},{reductions,59884},{trap_exit,true}]\ny(1) <0.0.0>\n\n0x053b28a0 Return addr 0x0327a77c (rpc:'-handle_call_call/6-fun-0-'/5 + 104)\ny(0) []\ny(1) []\ny(2) [{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{inets,\"5.2\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{system_arch,\"windows\"},{wall_clock,3232},{memory_data,{4284698624,4210270208,{<0.387.0>,5385512}}},{disk_data,[{\"C:\\\",46243100,46},{\"D:\\\",51809624,0},{\"G:\\\",33929248,18}]}]\ny(3) [{{node,'ns_1@10.2.1.101',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.102',memcached},[{port,11210},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{otp,[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{cookie,pmqchiglstnppkwf}]},{memory_quota,3268},{{node,'ns_1@10.2.1.102',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.100',membership},active},{rebalance_status,{none,<<76 bytes>>}},{{node,'ns_1@10.2.1.101',membership},active},{rest_creds,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{creds,[{\"Administrator\",[{password,'filtered-out'}]}]}]},{buckets,[{'_vclock',[{'ns_1@10.2.1.100',{9,63461309965}}]},{configs,[{\"default\",[{num_replicas,1},{ram_quota,3426746368},{auth_type,sasl},{sasl_password,[]},{type,membase},{num_vbuckets,1024},{ht_size,3079},{tap_keepalive,0},{tap_noop_interval,20},{max_txn_size,1000},{ht_locks,5},{servers,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{map,[['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns
_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined]
,['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['
ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_
1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1
@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10
.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.
2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101
'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100'
,'ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],[
'ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns
_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101']]}]}]}]},{port_servers,[{moxi,\"./bin/moxi/moxi\",[\"-Z\",{\"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",[port]},\"-z\",{\"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming\",[{rest,port}]},\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",{\"~s\",[{ns_moxi_sup,rest_user,[]}]}},{\"MOXI_SASL_PLAIN_PWD\",{\"~s\",[{ns_moxi_sup,rest_pass,[]}]}}]},use_stdio,stderr_to_stdout,stream]},{memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\",{\"~B\",[port]},\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",{\"admin=~s;default_bucket_name=default;auto_create=false\",[admin_user]},{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",{\"~s\",[{isasl,path}]}},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]}]},{alerts,[{email,[]},{email_alerts,false},{email_server,[{user,undefined},{pass,'filtered-out'},{addr,undefined},{port,undefined},{encrypt,false}]},{alerts,[server_down,server_unresponsive,server_up,server_joined,server_left,bucket_created,bucket_deleted,bucket_auth_failed]}]},{nodes_wanted,[{'_vclock',[{'ns_1@10.2.1.100',{2,63461308289}}]},'ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{rest,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{port,8091}]},{{node,'ns_1@10.2.1.102',membership},active},{{node,'ns_1@10.2.1.100',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.101',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.102',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{moxi,[{port,11211},{verbosity,[]}]},{replication,[{enabled,true}]},{{node,'ns_1@10.2.1.100',memcached},[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307259}}]},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.101',memcached},[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{dbdir,\"c:/Program 
Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.100',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]}]\ny(4) [\"bucket_engine_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r\",\"curl-7.21.1-w64_patched.tar.gz\r\",\"ep-engine_1.6.5r_4_g9d25ede-MINGW32_NT-6.0.i686.tar.gz\r\",\"libconflate_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"libevent-2.0.7-rc.tar.gz\r\",\"libmemcached-0.41_trond-norbye_mingw32-revno895.tar.gz\r\",\"libvbucket_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"membase-cli_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"memcached_1.4.4_359_g06c7d3b-MINGW32_NT-6.0.i686.tar.gz\r\",\"moxi_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"ns_server_1.6.5r.tar.gz\r\",\"pthreads-w64-2-8-0-release.tar.gz\r\",\"vbucketmigrator_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r\",\"wallace_1.6.5r-2-gc6cf01c-win64-201012280140\r\"]\ny(5) [{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{inets,\"5.2\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]\n\n0x053b28bc Return addr 0x00aec194 ()\ny(0) Catch 0x0327a77c (rpc:'-handle_call_call/6-fun-0-'/5 + 104)\ny(1) []\ny(2) []\ny(3) []\ny(4) <0.12.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,28657}, {total_heap_size,28657}, {links,[]}, {memory,115024}, {message_queue_len,0}, {reductions,18492}, {trap_exit,false}]}, {<11993.8910.0>,undefined}]}, {memory,{4284698624,4210270208,{<11993.387.0>,5385512}}}, {disk, [{"C:\\",46243100,46}, {"D:\\",51809624,0}, {"G:\\",33929248,18}]}]}, {'ns_1@10.2.1.102', [{version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {manifest, ["bucket_engine_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r", "curl-7.21.1-w64_patched.tar.gz\r", "ep-engine_1.6.5r_4_g9d25ede-MINGW32_NT-6.0.i686.tar.gz\r", "libconflate_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r", "libevent-2.0.7-rc.tar.gz\r", "libmemcached-0.41_trond-norbye_mingw32-revno895.tar.gz\r", "libvbucket_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r", "membase-cli_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r", "memcached_1.4.4_359_g06c7d3b-MINGW32_NT-6.0.i686.tar.gz\r", "moxi_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r", "ns_server_1.6.5r.tar.gz\r", "pthreads-w64-2-8-0-release.tar.gz\r", "vbucketmigrator_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r", "wallace_1.6.5r-2-gc6cf01c-win64-201012280140\r"]}, {config, [{{node,'ns_1@10.2.1.101',ns_log}, [{filename, "c:/Program Files/Membase/Server/data/ns_1/ns_log"}]}, {{node,'ns_1@10.2.1.102',memcached}, [{port,11210}, {dbdir,"c:/Program Files/Membase/Server/data/ns_1"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"./bin/bucket_engine/bucket_engine.so"}, {engines, [{membase, [{engine,"bin/ep_engine/ep.so"}, {initfile,"priv/init.sql"}]}, {memcached, [{engine,"bin/memcached/default_engine.so"}]}]}, {verbosity,[]}]}, {otp, [{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]}, {cookie,pmqchiglstnppkwf}]}, {memory_quota,3268}, {{node,'ns_1@10.2.1.102',ns_log}, [{filename, "c:/Program Files/Membase/Server/data/ns_1/ns_log"}]}, {{node,'ns_1@10.2.1.100',membership},active}, {rebalance_status, {none, <<"Rebalance failed. See logs for detailed reason. 
You can try rebalance again.">>}}, {{node,'ns_1@10.2.1.101',membership},active}, {rest_creds, [{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]}, {creds, [{"Administrator",[{password,'filtered-out'}]}]}]}, {buckets, [{'_vclock',[{'ns_1@10.2.1.100',{9,63461309965}}]}, {configs, [{"default", [{num_replicas,1}, {ram_quota,3426746368}, {auth_type,sasl}, {sasl_password,[]}, {type,membase}, {num_vbuckets,1024}, {ht_size,3079}, {tap_keepalive,0}, {tap_noop_interval,20}, {max_txn_size,1000}, {ht_locks,5}, {servers, ['ns_1@10.2.1.100','ns_1@10.2.1.101', 'ns_1@10.2.1.102']}, {map, [['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], 
['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], 
['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], 
['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101']]}]}]}]}, {port_servers, [{moxi,"./bin/moxi/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR", {"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,stderr_to_stdout,stream]}, {memcached,"./bin/memcached/memcached", ["-X","./bin/memcached/stdin_term_handler.so","-p", {"~B",[port]}, "-E","./bin/bucket_engine/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,stream]}]}, {alerts, [{email,[]}, {email_alerts,false}, {email_server, [{user,undefined}, {pass,'filtered-out'}, {addr,undefined}, {port,undefined}, {encrypt,false}]}, {alerts, [server_down,server_unresponsive,server_up, server_joined,server_left,bucket_created, bucket_deleted,bucket_auth_failed]}]}, {nodes_wanted, [{'_vclock',[{'ns_1@10.2.1.100',{2,63461308289}}]}, 'ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']}, {rest, [{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]}, {port,8091}]}, {{node,'ns_1@10.2.1.102',membership},active}, {{node,'ns_1@10.2.1.100',isasl}, [{path, "c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}]}, {{node,'ns_1@10.2.1.101',isasl}, [{path, "c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}]}, {{node,'ns_1@10.2.1.102',isasl}, [{path, "c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}]}, {moxi,[{port,11211},{verbosity,[]}]}, {replication,[{enabled,true}]}, {{node,'ns_1@10.2.1.100',memcached}, [{'_vclock',[{'ns_1@10.2.1.100',{1,63461307259}}]}, {dbdir,"c:/Program Files/Membase/Server/data/ns_1"}, {port,11210}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"./bin/bucket_engine/bucket_engine.so"}, {engines, [{membase, [{engine,"bin/ep_engine/ep.so"}, {initfile,"priv/init.sql"}]}, {memcached, [{engine,"bin/memcached/default_engine.so"}]}]}, {verbosity,[]}]}, {{node,'ns_1@10.2.1.101',memcached}, [{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]}, {dbdir,"c:/Program Files/Membase/Server/data/ns_1"}, {port,11210}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"./bin/bucket_engine/bucket_engine.so"}, {engines, [{membase, [{engine,"bin/ep_engine/ep.so"}, {initfile,"priv/init.sql"}]}, {memcached, [{engine,"bin/memcached/default_engine.so"}]}]}, {verbosity,[]}]}, {{node,'ns_1@10.2.1.100',ns_log}, [{filename, "c:/Program Files/Membase/Server/data/ns_1/ns_log"}]}]}, {basic_info, [{version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, 
{stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2218}, {memory_data, {4284698624,3351322624,{<10870.307.0>,4114268}}}, {disk_data, [{"C:\\",49423972,41}, {"D:\\",52797620,0}, {"G:\\",34724465,17}]}]}, {processes, [{<10870.0.0>, [{registered_name,init}, {status,waiting}, {initial_call,{otp_ring0,start,2}}, {backtrace, <<"Program counter: 0x00e0fcb8 (init:loop/1 + 20)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x055decec Return addr 0x00b1c194 ()\ny(0) {state,[{'-root',[<<37 bytes>>]},{'-progname',[<<3 bytes>>]},{'-home',[<<10 bytes>>]},{'-name',[<<15 bytes>>]},{'-pa',[<<18 bytes>>]},{'-pa',[<<29 bytes>>]},{'-pa',[<<32 bytes>>]},{'-pa',[<<46 bytes>>]},{'-pa',[<<32 bytes>>]},{'-setcookie',[<<8 bytes>>]},{'-ns_server',[<<19 bytes>>,<<6 bytes>>]},{'-ns_server',[<<24 bytes>>,<<8 bytes>>]},{'-ns_server',[<<24 bytes>>,<<2 bytes>>]},{'-kernel',[<<20 bytes>>,<<5 bytes>>,<<20 bytes>>,<<5 bytes>>]},{'-ns_server',[<<14 bytes>>,<<32 bytes>>]}],[],[[ns_bootstrap,override_resolver]],[{application_controller,<0.7.0>},{error_logger,<0.6.0>},{erl_prim_loader,<0.3.0>}],<0.2.0>,{started,started},{\"OTP APN 181 01\",\"R13B03\"},[],[]}\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,91}]}, {heap_size,1597}, {total_heap_size,3194}, {links,[<10870.6.0>,<10870.7.0>,<10870.3.0>]}, {memory,13192}, {message_queue_len,0}, {reductions,27650}, {trap_exit,true}]}, {<10870.3.0>, [{registered_name,erl_prim_loader}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x00e3fd88 (erl_prim_loader:loop/3 + 92)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04fdf654 Return addr 0x00b1c194 ()\ny(0) []\ny(1) [\"bin/ns_server/deps/gen_smtp/ebin\",\"bin/ns_server/deps/menelaus/deps/mochiweb/ebin\",\"bin/ns_server/deps/menelaus/ebin\",\"bin/ns_server/deps/emoxi/ebin\",\"bin/ns_server/ebin\",\"C:\\PROGRA~1\\Membase\\Server\\bin\\erlang/lib/kernel-2.13.4/ebin\",\"C:\\PROGRA~1\\Membase\\Server\\bin\\erlang/lib/stdlib-1.16.4/ebin\"]\ny(2) <0.2.0>\ny(3) {state,efile,[],none,#Port<0.1>,infinity,undefined,true,{prim_state,false,undefined,undefined}}\ny(4) infinity\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,362}]}, {heap_size,1597}, {total_heap_size,12543}, {links,[#Port<10870.1>,<10870.0.0>]}, {memory,50568}, {message_queue_len,0}, {reductions,1020771}, {trap_exit,true}]}, {<10870.6.0>, [{registered_name,error_logger}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00e34db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00e8fc44 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,ns_log_mf_h,false,{state,\"logs\",10485760,10,431361,1,{file_descriptor,prim_file,{#Port<0.1578>,688}},[],#Fun},false},{handler,error_logger,false,[],false}]\ny(3) error_logger\ny(4) <0.2.0>\n\n0x00e8fc5c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,4181}, {total_heap_size,8362}, {links,[<10870.0.0>,<10870.31.0>,#Port<10870.1578>]}, {memory,33924}, {message_queue_len,0}, {reductions,130765}, {trap_exit,true}]}, {<10870.7.0>, [{registered_name,application_controller}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0453ee4c Return addr 0x00b1c194 ()\ny(0) []\ny(1) 
infinity\ny(2) application_controller\ny(3) {state,[],[],[],[{mnesia,<0.237.0>},{ns_server,<0.59.0>},{os_mon,<0.50.0>},{sasl,<0.39.0>},{stdlib,undefined},{kernel,<0.9.0>}],[],[{mnesia,temporary},{ns_server,temporary},{os_mon,temporary},{sasl,temporary},{stdlib,permanent},{kernel,permanent}],[],[]}\ny(4) application_controller\ny(5) <0.2.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,25}]}, {heap_size,2584}, {total_heap_size,20295}, {links, [<10870.50.0>,<10870.59.0>,<10870.237.0>,<10870.9.0>, <10870.39.0>,<10870.0.0>]}, {memory,81716}, {message_queue_len,0}, {reductions,31489}, {trap_exit,true}]}, {<10870.9.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0326fdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ebd4d4 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.10.0>,{appl_data,kernel,[application_controller,erl_reply,auth,boot_server,code_server,disk_log_server,disk_log_sup,erl_prim_loader,error_logger,file_server_2,fixtable_server,global_group,global_name_server,heart,init,kernel_config,kernel_sup,net_kernel,net_sup,rex,user,os_server,ddll_server,erl_epmd,inet_db,pg2],undefined,{kernel,[]},[application,application_controller,application_master,application_starter,auth,code,packages,code_server,dist_util,erl_boot_server,erl_distribution,erl_reply,error_handler,error_logger,file,file_server,file_io_server,global,global_group,global_search,group,heart,hipe_unified_loader,inet6_tcp,inet6_tcp_dist,inet6_udp,inet6_sctp,inet_config,inet_hosts,inet_gethost_native,inet_tcp_dist,kernel,kernel_config,net,net_adm,net_kernel,os,ram_file,rpc,user,user_drv,user_sup,disk_log,disk_log_1,disk_log_server,disk_log_sup,dist_ac,erl_ddll,erl_epmd,erts_debug,gen_tcp,gen_udp,gen_sctp,inet,inet_db,inet_dns,inet_parse,inet_res,inet_tcp,inet_udp,inet_sctp,pg2,seq_trace,standard_error,wrap_log_reader],[],infinity,infinity},[],0,<0.0.0>}\ny(2) <0.7.0>\n\n0x00ebd4e4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<10870.7.0>,<10870.10.0>]}, {memory,3472}, {message_queue_len,0}, {reductions,44}, {trap_exit,true}]}, {<10870.10.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032710fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ebf8d8 Return addr 0x00b1c194 ()\ny(0) []\ny(1) kernel\ny(2) <0.11.0>\ny(3) <0.9.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.9.0>,<10870.11.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,72}, {trap_exit,true}]}, {<10870.11.0>, [{registered_name,kernel_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0302a708 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,kernel_sup},one_for_all,[{child,<0.34.0>,kernel_safe_sup,{supervisor,start_link,[{local,kernel_safe_sup},kernel,safe]},permanent,infinity,supervisor,[kernel]},{child,<0.33.0>,kernel_config,{kernel_config,start_link,[]},permanent,2000,worker,[kernel_config]},{child,<0.29.0>,user,{user_sup,start,[]},temporary,2000,supervisor,[user_sup]},{child,<0.27.0>,standard_error,{standard_error,start_link,[]},temporary,2000,supervisor,[user_sup]},{child,<0.26.0>,code_server,{code,start_link,[]},permanent,2000,worker,[code]},{child,<0.25.0>,file_server_2,{file_server,start_link,[]},permanent,2000,worker,[file,file_server,file_io_server,prim_file]},{child,<0.24.0>,global_group,{global_group,start_link,[]},permanent,2000,worker,[global_group]},{child,<0.18.0>,net_sup,{erl_distribution,start_link,[]},permanent,infinity,supervisor,[erl_distribution]},{child,<0.17.0>,inet_db,{inet_db,start_link,[]},permanent,2000,worker,[inet_db]},{child,<0.13.0>,global_name_server,{global,start_link,[]},permanent,2000,worker,[global]},{child,<0.12.0>,rex,{rpc,start_link,[]},permanent,2000,worker,[rpc]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,1,[],kernel,[]}\ny(4) kernel_sup\ny(5) <0.10.0>\n\n0x0302a724 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,4181}, {total_heap_size,8362}, {links, [<10870.26.0>,<10870.29.0>,<10870.33.0>,<10870.34.0>, <10870.27.0>,<10870.17.0>,<10870.24.0>,<10870.25.0>, <10870.18.0>,<10870.12.0>,<10870.13.0>,<10870.10.0>]}, {memory,34104}, {message_queue_len,0}, {reductions,3131}, {trap_exit,true}]}, {<10870.12.0>, [{registered_name,rex}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bd34c0 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) rpc\ny(3) {1,{<0.3767.0>,{<10833.12006.0>,{#Ref<10833.0.0.246339>,'ns_1@10.2.1.102'}},nil,nil}}\ny(4) rex\ny(5) <0.11.0>\n\n0x04bd34dc Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,187}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<10870.11.0>]}, {memory,5996}, {message_queue_len,0}, {reductions,55620}, {trap_exit,true}]}, {<10870.13.0>, [{registered_name,global_name_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0536bd08 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) global\ny(3) {state,true,['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],[],[],'nonode@nohost',<0.14.0>,<0.15.0>,<0.16.0>,no_trace,false}\ny(4) global_name_server\ny(5) <0.11.0>\n\n0x0536bd24 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,8}]}, {heap_size,1597}, {total_heap_size,1974}, {links, [<10870.14.0>,<10870.16.0>,<10870.15.0>,<10870.11.0>]}, {memory,8504}, {message_queue_len,0}, {reductions,2085}, {trap_exit,true}]}, {<10870.14.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 
0x032ab31c (global:loop_the_locker/1 + 588)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00eb59b0 Return addr 0x032ab0b0 (global:init_the_locker/1 + 192)\ny(0) {multi,[],[],['ns_1@10.2.1.100','ns_1@10.2.1.101'],'ns_1@10.2.1.102',false,false}\ny(1) infinity\n\n0x00eb59bc Return addr 0x00b1c194 ()\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,610}, {total_heap_size,987}, {links,[<10870.13.0>]}, {memory,4324}, {message_queue_len,0}, {reductions,393}, {trap_exit,true}]}, {<10870.15.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x032af3c8 (global:collect_deletions/2 + 76)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ec691c Return addr 0x032af348 (global:loop_the_deleter/1 + 36)\ny(0) infinity\ny(1) []\ny(2) <0.13.0>\n\n0x00ec692c Return addr 0x00b1c194 ()\ny(0) <0.13.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,377}, {total_heap_size,754}, {links,[<10870.13.0>]}, {memory,3392}, {message_queue_len,0}, {reductions,232}, {trap_exit,false}]}, {<10870.16.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x032af4e8 (global:loop_the_registrar/0 + 12)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x044a7e50 Return addr 0x00b1c194 ()\ny(0) []\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,610}, {total_heap_size,987}, {links,[<10870.13.0>]}, {memory,4324}, {message_queue_len,0}, {reductions,249}, {trap_exit,false}]}, {<10870.17.0>, [{registered_name,inet_db}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00eac098 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) inet_db\ny(3) {state,inet_db,inet_cache,inet_hosts_byname,inet_hosts_byaddr,inet_hosts_file_byname,inet_hosts_file_byaddr,#Ref<0.0.0.8>}\ny(4) inet_db\ny(5) <0.11.0>\n\n0x00eac0b4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,17}]}, {heap_size,377}, {total_heap_size,754}, {links,[<10870.11.0>]}, {memory,3452}, {message_queue_len,0}, {reductions,1754}, {trap_exit,true}]}, {<10870.18.0>, [{registered_name,net_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00eb7878 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,net_sup},one_for_all,[{child,<0.21.0>,net_kernel,{net_kernel,start_link,[['ns_1@10.2.1.102',longnames]]},permanent,2000,worker,[net_kernel]},{child,<0.20.0>,auth,{auth,start_link,[]},permanent,2000,worker,[auth]},{child,<0.19.0>,erl_epmd,{erl_epmd,start_link,[]},permanent,2000,worker,[erl_epmd]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,1,[],erl_distribution,['ns_1@10.2.1.102',longnames]}\ny(4) net_sup\ny(5) <0.11.0>\n\n0x00eb7894 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,377}, {total_heap_size,987}, {links, 
[<10870.19.0>,<10870.20.0>,<10870.21.0>,<10870.11.0>]}, {memory,4444}, {message_queue_len,0}, {reductions,265}, {trap_exit,true}]}, {<10870.19.0>, [{registered_name,erl_epmd}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ea33c8 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) erl_epmd\ny(3) {state,#Port<0.473>,21100,ns_1}\ny(4) erl_epmd\ny(5) <0.18.0>\n\n0x00ea33e4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.18.0>,#Port<10870.473>]}, {memory,1388}, {message_queue_len,0}, {reductions,135}, {trap_exit,false}]}, {<10870.20.0>, [{registered_name,auth}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00e9e634 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) auth\ny(3) {state,pmqchiglstnppkwf,12}\ny(4) auth\ny(5) <0.18.0>\n\n0x00e9e650 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,610}, {total_heap_size,987}, {links,[<10870.18.0>]}, {memory,4384}, {message_queue_len,0}, {reductions,397}, {trap_exit,true}]}, {<10870.21.0>, [{registered_name,net_kernel}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04ee9234 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) net_kernel\ny(3) {state,'ns_1@10.2.1.102','ns_1@10.2.1.102',longnames,{tick,<0.23.0>,15000},7000,sys_dist,[{<0.186.0>,'ns_1@10.2.1.101'},{<0.182.0>,'ns_1@10.2.1.100'}],[],[{listen,#Port<0.460>,<0.22.0>,{net_address,{{0,0,0,0},21100},\"WIN-3IJI1HGEL7Q\",tcp,inet},inet_tcp_dist}],[],0,all}\ny(4) net_kernel\ny(5) <0.18.0>\n\n0x04ee9250 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,10}]}, {heap_size,2584}, {total_heap_size,2961}, {links, [<10870.23.0>,<10870.182.0>,<10870.186.0>, <10870.18.0>,<10870.22.0>,#Port<10870.460>]}, {memory,12396}, {message_queue_len,0}, {reductions,3973}, {trap_exit,true}]}, {<10870.22.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{inet_tcp_dist,accept_loop,2}}, {backtrace, <<"Program counter: 0x00e292d8 (prim_inet:accept0/2 + 92)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00eac7b8 Return addr 0x032d8894 (inet_tcp:accept/1 + 20)\ny(0) 121\ny(1) #Port<0.460>\n\n0x00eac7c4 Return addr 0x032d2550 (inet_tcp_dist:accept_loop/2 + 48)\ny(0) []\n\n0x00eac7cc Return addr 0x00b1c194 ()\ny(0) []\ny(1) #Port<0.460>\ny(2) <0.21.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,4}]}, {heap_size,377}, {total_heap_size,754}, {links,[<10870.21.0>]}, {memory,3432}, {message_queue_len,0}, {reductions,789}, {trap_exit,false}]}, {<10870.23.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{net_kernel,ticker,2}}, {backtrace, <<"Program counter: 0x032e2384 (net_kernel:ticker_loop/2 + 28)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ea3790 Return 
addr 0x00b1c194 ()\ny(0) 15000\ny(1) <0.21.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.21.0>]}, {memory,1308}, {message_queue_len,0}, {reductions,297}, {trap_exit,false}]}, {<10870.24.0>, [{registered_name,global_group}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00eb7c28 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) global_group\ny(3) {state,no_conf,true,[],[],[],[],[],'ns_1@10.2.1.102',[],normal,normal}\ny(4) global_group\ny(5) <0.11.0>\n\n0x00eb7c44 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.11.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,76}, {trap_exit,true}]}, {<10870.25.0>, [{registered_name,file_server_2}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04c7e040 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) file_server\ny(3) #Port<0.496>\ny(4) file_server_2\ny(5) <0.11.0>\n\n0x04c7e05c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1396}]}, {heap_size,6765}, {total_heap_size,13530}, {links,[#Port<10870.496>,<10870.11.0>]}, {memory,54576}, {message_queue_len,0}, {reductions,1945575}, {trap_exit,true}]}, {<10870.26.0>, [{registered_name,code_server}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x0330727c (code_server:loop/1 + 64)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04460b00 Return addr 0x00b1c194 ()\ny(0) 
{state,<0.11.0>,\"c:/PROGRA~1/Membase/Server/bin/erlang\",[\"bin/ns_server/deps/gen_smtp/ebin\",\"bin/ns_server/deps/menelaus/deps/mochiweb/ebin\",\"bin/ns_server/deps/menelaus/ebin\",\"bin/ns_server/ebin\",\".\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/kernel-2.13.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/stdlib-1.16.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/xmerl-1.2.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/webtool-0.8.5/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/typer-0.1.7.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/tv-2.1.4.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/tools-2.6.5/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/test_server-3.3.5/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/syntax_tools-1.6.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/ssl-3.10.7/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/ssh-1.1.7/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/snmp-4.15/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/sasl-2.1.8/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/runtime_tools-1.8.2/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/reltool-0.5.2/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/public_key-0.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/pman-2.7.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/percept-0.8.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/parsetools-2.0.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/otp_mibs-1.0.6/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/os_mon-2.2.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/odbc-2.10.6/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/observer-0.9.8.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/mnesia-4.4.12/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/inviso-0.6.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/inets-5.2/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/hipe-3.7.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/eunit-2.1.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/et-1.3.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/erts-5.7.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/erl_interface-3.6.4\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/erl_docgen-0.1\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/dialyzer-2.1.0/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/debugger-3.2.1/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/crypto-1.6.3/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/compiler-4.6.4/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/common_test-1.4.6/ebin\",\"c:/PROGRA~1/Membase/Server/bin/erlang/lib/appmon-2.1.10.2/ebin\",\"c:/Program Files/Membase/Server/bin/ns_server/deps/menelaus/deps/erlwsh/ebin\"],4111,8208,no_cache,interactive,[]}\ny(1) <0.11.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,221}]}, {heap_size,4181}, {total_heap_size,21892}, {links,[<10870.11.0>]}, {memory,87944}, {message_queue_len,0}, {reductions,193766}, {trap_exit,true}]}, {<10870.27.0>, [{registered_name,standard_error_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ea4118 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor_bridge\ny(3) {state,standard_error,<0.28.0>,<0.28.0>,{local,standard_error_sup}}\ny(4) standard_error_sup\ny(5) <0.11.0>\n\n0x00ea4134 Return addr 0x00b1c194 ()\ny(0) Catch 
0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.11.0>,<10870.28.0>]}, {memory,1388}, {message_queue_len,0}, {reductions,40}, {trap_exit,true}]}, {<10870.28.0>, [{registered_name,standard_error}, {status,waiting}, {initial_call,{standard_error,server,2}}, {backtrace, <<"Program counter: 0x032f196c (standard_error:server_loop/1 + 20)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ea44e4 Return addr 0x00b1c194 ()\ny(0) #Port<0.792>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.27.0>,#Port<10870.792>]}, {memory,1388}, {message_queue_len,0}, {reductions,7}, {trap_exit,true}]}, {<10870.29.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00eb4678 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor_bridge\ny(3) {state,user_sup,<0.31.0>,<0.31.0>,{<0.29.0>,user_sup}}\ny(4) <0.29.0>\ny(5) <0.11.0>\n\n0x00eb4694 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,1597}, {total_heap_size,1597}, {links,[<10870.11.0>,<10870.31.0>]}, {memory,6844}, {message_queue_len,0}, {reductions,166}, {trap_exit,true}]}, {<10870.31.0>, [{registered_name,user}, {status,waiting}, {initial_call,{user,server,2}}, {backtrace, <<"Program counter: 0x0332dd88 (user:get_chars/8 + 176)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0453c224 Return addr 0x0332aea0 (user:do_io_request/5 + 56)\ny(0) []\ny(1) []\ny(2) []\ny(3) unicode\ny(4) start\ny(5) {[],[]}\ny(6) #Port<0.830>\ny(7) {erl_scan,tokens,[1]}\ny(8) get_until\ny(9) io_lib\ny(10) [40,\"ns_1@10.2.1.102\",41,\"1\",62,32]\n\n0x0453c254 Return addr 0x0332adc8 (user:server_loop/2 + 784)\ny(0) #Port<0.830>\ny(1) <0.31.0>\ny(2) <0.48.0>\n\n0x0453c264 Return addr 0x0332a894 (user:catch_loop/3 + 56)\ny(0) #Port<0.830>\n\n0x0453c26c Return addr 0x00b1c194 ()\ny(0) <0.32.0>\ny(1) #Port<0.830>\ny(2) Catch 0x0332a894 (user:catch_loop/3 + 56)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,5778}, {links, [<10870.29.0>,<10870.32.0>,#Port<10870.830>, <10870.6.0>]}, {memory,23648}, {message_queue_len,0}, {reductions,40320}, {trap_exit,true}]}, {<10870.32.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03e52924 (shell:get_command1/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0301d970 Return addr 0x03e522ec (shell:server_loop/7 + 148)\ny(0) []\ny(1) 12305\ny(2) []\ny(3) <0.47.0>\ny(4) <0.48.0>\n\n0x0301d988 Return addr 0x00b1c194 ()\ny(0) []\ny(1) []\ny(2) 1\ny(3) 20\ny(4) 20\ny(5) []\ny(6) 12305\ny(7) []\ny(8) 0\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,4}]}, {heap_size,2584}, {total_heap_size,20295}, {links,[<10870.47.0>,<10870.48.0>,<10870.31.0>]}, {memory,81656}, {message_queue_len,0}, {reductions,5170}, {trap_exit,true}]}, {<10870.33.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 
(invalid)\narity = 0\n\n0x00ea0400 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) kernel_config\ny(3) []\ny(4) <0.33.0>\ny(5) <0.11.0>\n\n0x00ea041c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.11.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,268}, {trap_exit,true}]}, {<10870.34.0>, [{registered_name,kernel_safe_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030424b4 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,kernel_safe_sup},one_for_one,[{child,<0.134.0>,dets,{dets_server,start_link,[]},permanent,2000,worker,[dets_server]},{child,<0.133.0>,dets_sup,{dets_sup,start_link,[]},permanent,1000,supervisor,[dets_sup]},{child,<0.130.0>,disk_log_server,{disk_log_server,start_link,[]},permanent,2000,worker,[disk_log_server]},{child,<0.129.0>,disk_log_sup,{disk_log_sup,start_link,[]},permanent,1000,supervisor,[disk_log_sup]},{child,<0.57.0>,timer_server,{timer,start_link,[]},permanent,1000,worker,[timer]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},4,3600,[],kernel,safe}\ny(4) kernel_safe_sup\ny(5) <0.11.0>\n\n0x030424d0 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,610}, {total_heap_size,987}, {links, [<10870.130.0>,<10870.133.0>,<10870.134.0>, <10870.57.0>,<10870.129.0>,<10870.11.0>]}, {memory,4484}, {message_queue_len,0}, {reductions,388}, {trap_exit,true}]}, {<10870.39.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0326fdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ebe984 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.40.0>,{appl_data,sasl,[sasl_sup,alarm_handler,overload,release_handler],undefined,{sasl,[]},[sasl,alarm_handler,format_lib_supp,misc_supp,overload,rb,rb_format_supp,release_handler,release_handler_1,erlsrv,sasl_report,sasl_report_tty_h,sasl_report_file_h,systools,systools_make,systools_rc,systools_relup,systools_lib],[],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x00ebe994 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.7.0>,<10870.40.0>]}, {memory,1388}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<10870.40.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032710fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ebf0e8 Return addr 0x00b1c194 ()\ny(0) {state,tty,undefined}\ny(1) sasl\ny(2) <0.41.0>\ny(3) <0.39.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.39.0>,<10870.41.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,70}, {trap_exit,true}]}, 
{<10870.41.0>, [{registered_name,sasl_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ebed28 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,sasl_sup},one_for_one,[{child,<0.45.0>,release_handler,{release_handler,start_link,[]},permanent,2000,worker,[]},{child,<0.42.0>,sasl_safe_sup,{supervisor,start_link,[{local,sasl_safe_sup},sasl,safe]},permanent,infinity,supervisor,[sasl]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,1,[],sasl,[]}\ny(4) sasl_sup\ny(5) <0.40.0>\n\n0x00ebed44 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<10870.42.0>,<10870.45.0>,<10870.40.0>]}, {memory,2916}, {message_queue_len,0}, {reductions,158}, {trap_exit,true}]}, {<10870.42.0>, [{registered_name,sasl_safe_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ebce20 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,sasl_safe_sup},one_for_one,[{child,<0.44.0>,overload,{overload,start_link,[]},permanent,2000,worker,[overload]},{child,<0.43.0>,alarm_handler,{alarm_handler,start_link,[]},permanent,2000,worker,dynamic}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},4,3600,[],sasl,safe}\ny(4) sasl_safe_sup\ny(5) <0.41.0>\n\n0x00ebce3c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<10870.43.0>,<10870.44.0>,<10870.41.0>]}, {memory,2916}, {message_queue_len,0}, {reductions,174}, {trap_exit,true}]}, {<10870.43.0>, [{registered_name,alarm_handler}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00e34db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ebca74 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,alarm_handler,false,[],false}]\ny(3) alarm_handler\ny(4) <0.42.0>\n\n0x00ebca8c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.42.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,28}, {trap_exit,true}]}, {<10870.44.0>, [{registered_name,overload}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ebc6c0 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) overload\ny(3) {state,0,0,8.000000e-001,468,1.000000e-001,{0,0},clear}\ny(4) overload\ny(5) <0.42.0>\n\n0x00ebc6dc Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, 
{total_heap_size,233}, {links,[<10870.42.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,39}, {trap_exit,false}]}, {<10870.45.0>, [{registered_name,release_handler}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ea4e5c Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) release_handler\ny(3) {state,[],\"C:\\PROGRA~1\\Membase\\Server\\bin\\erlang\",\"c:/PROGRA~1/Membase/Server/bin/erlang/releases\",[{release,\"OTP APN 181 01\",\"R13B03\",undefined,[],permanent}],undefined,{no_check,\"c:/PROGRA~1/Membase/Server/bin/erlang/bin/start\"},false,false,false,[]}\ny(4) release_handler\ny(5) <0.41.0>\n\n0x00ea4e78 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,5}]}, {heap_size,610}, {total_heap_size,987}, {links,[<10870.41.0>]}, {memory,4384}, {message_queue_len,0}, {reductions,1249}, {trap_exit,false}]}, {<10870.47.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03e55484 (shell:eval_loop/3 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ea1c5c Return addr 0x00b1c194 ()\ny(0) []\ny(1) []\ny(2) 12305\ny(3) []\ny(4) <0.32.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.32.0>]}, {memory,1308}, {message_queue_len,0}, {reductions,8}, {trap_exit,false}]}, {<10870.48.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03eb6ee8 (io:wait_io_mon_reply/2 + 28)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ebdc34 Return addr 0x03eb682c (io:parse_erl_exprs/3 + 100)\ny(0) #Ref<0.0.0.44>\ny(1) <0.31.0>\n\n0x00ebdc40 Return addr 0x03e5c180 (shell:'-get_command/5-fun-0-'/1 + 20)\ny(0) []\n\n0x00ebdc48 Return addr 0x00b1c194 ()\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.32.0>]}, {memory,1348}, {message_queue_len,0}, {reductions,14}, {trap_exit,false}]}, {<10870.50.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0326fdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ec0b8c Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.51.0>,{appl_data,os_mon,[os_mon_sup,os_mon_sysinfo,disksup,memsup,cpu_sup,os_sup_server],undefined,{os_mon,[]},[os_mon,os_mon_mib,os_sup,disksup,memsup,cpu_sup,os_mon_sysinfo,nteventlog],[],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x00ec0b9c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.7.0>,<10870.51.0>]}, {memory,1388}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<10870.51.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032710fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ec0f40 Return addr 0x00b1c194 ()\ny(0) []\ny(1) os_mon\ny(2) <0.52.0>\ny(3) <0.50.0>\n">>}, {error_handler,error_handler}, 
{garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.50.0>,<10870.52.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,40}, {trap_exit,true}]}, {<10870.52.0>, [{registered_name,os_mon_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ec3da0 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,os_mon_sup},one_for_one,[{child,<0.55.0>,memsup,{memsup,start_link,[]},permanent,2000,worker,[memsup]},{child,<0.54.0>,disksup,{disksup,start_link,[]},permanent,2000,worker,[disksup]},{child,<0.53.0>,os_mon_sysinfo,{os_mon_sysinfo,start_link,[]},permanent,2000,worker,[os_mon_sysinfo]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},5,3600,[],os_mon,[]}\ny(4) os_mon_sup\ny(5) <0.51.0>\n\n0x00ec3dbc Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links, [<10870.53.0>,<10870.54.0>,<10870.55.0>,<10870.51.0>]}, {memory,3512}, {message_queue_len,0}, {reductions,274}, {trap_exit,true}]}, {<10870.53.0>, [{registered_name,os_mon_sysinfo}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00c4ee18 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) os_mon_sysinfo\ny(3) {state,#Port<0.1438>}\ny(4) os_mon_sysinfo\ny(5) <0.52.0>\n\n0x00c4ee34 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,53}]}, {heap_size,1597}, {total_heap_size,1974}, {links,[<10870.52.0>,#Port<10870.1438>]}, {memory,8352}, {message_queue_len,0}, {reductions,4846}, {trap_exit,true}]}, {<10870.54.0>, [{registered_name,disksup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00c67080 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) disksup\ny(3) {state,80,60000,{win32,nt},[{\"C:\\\",49423972,41},{\"D:\\\",52797620,0},{\"G:\\\",34724465,17}],not_used}\ny(4) disksup\ny(5) <0.52.0>\n\n0x00c6709c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,103}]}, {heap_size,233}, {total_heap_size,1830}, {links,[<10870.52.0>]}, {memory,7756}, {message_queue_len,0}, {reductions,39319}, {trap_exit,true}]}, {<10870.55.0>, [{registered_name,memsup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00c68bb0 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) memsup\ny(3) {state,{win32,nt},false,{3351322624,4284698624},{<0.307.0>,4114268},false,60000,30000,8.000000e-001,5.000000e-002,<0.3756.0>,undefined,undefined,[],[]}\ny(4) memsup\ny(5) <0.52.0>\n\n0x00c68bcc Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, 
{error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,87}]}, {heap_size,377}, {total_heap_size,987}, {links,[<10870.52.0>]}, {memory,4384}, {message_queue_len,0}, {reductions,88068}, {trap_exit,true}]}, {<10870.57.0>, [{registered_name,timer_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04c808ac Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) 560\ny(2) timer\ny(3) []\ny(4) timer_server\ny(5) <0.34.0>\n\n0x04c808c8 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,586}]}, {heap_size,2584}, {total_heap_size,2961}, {links, [<10870.199.0>,<10870.249.0>,<10870.342.0>, <10870.507.0>,<10870.269.0>,<10870.200.0>, <10870.245.0>,<10870.172.0>,<10870.179.0>, <10870.34.0>]}, {memory,12460}, {message_queue_len,0}, {reductions,145568}, {trap_exit,true}]}, {<10870.59.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0326fdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ede554 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.60.0>,{appl_data,ns_server,[ns_server_sup,ns_config,ns_config_sup,ns_config_events,ns_node_disco,ns_node_disco_events],undefined,{ns_server,[]},[misc,ns_config,ns_config_default,ns_config_log,ns_config_sup,ns_config_rep,ns_log,ns_node_disco,ns_node_disco_conf_events,ns_node_disco_log,ns_port_init,ns_port_server,ns_port_sup,ns_server,ns_server_sup],[menelaus],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x00ede564 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<10870.7.0>,<10870.60.0>]}, {memory,1964}, {message_queue_len,0}, {reductions,46}, {trap_exit,true}]}, {<10870.60.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032710fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ec6d78 Return addr 0x00b1c194 ()\ny(0) []\ny(1) ns_server\ny(2) <0.61.0>\ny(3) <0.59.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.59.0>,<10870.61.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,50}, {trap_exit,true}]}, {<10870.61.0>, [{registered_name,ns_server_cluster_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0301b118 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,ns_server_cluster_sup},one_for_one,[{child,<0.171.0>,ns_server_sup,{ns_server_sup,start_link,[]},permanent,infinity,supervisor,[ns_server_sup]},{child,<0.64.0>,ns_config_sup,{ns_config_sup,start_link,[]},permanent,infinity,supervisor,[ns_config_sup]},{child,<0.63.0>,ns_cluster,{ns_cluster,start_link,[]},permanent,5000,worker,[ns_cluster]},{child,<0.62.0>,dist_manager,{dist_manager,start_link,[]},permanent,10,worker,[dist_manager]},{child,undefined,log_os_info,{log_os_info,start_link,[]},transient,10,worker,[log_os_info]},{child,undefined,ns_log_mf_h,{ns_log_mf_h,start_link,[]},transient,10,worker,[ns_log_mf_h]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,1,[],ns_server_cluster_sup,[]}\ny(4) ns_server_cluster_sup\ny(5) <0.60.0>\n\n0x0301b134 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,377}, {total_heap_size,987}, {links, [<10870.62.0>,<10870.64.0>,<10870.171.0>,<10870.63.0>, <10870.60.0>]}, {memory,4464}, {message_queue_len,0}, {reductions,2794}, {trap_exit,true}]}, {<10870.62.0>, [{registered_name,dist_manager}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00edf3c8 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) dist_manager\ny(3) {state,false,\"127.0.0.1\"}\ny(4) dist_manager\ny(5) <0.61.0>\n\n0x00edf3e4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.61.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,132}, {trap_exit,false}]}, {<10870.63.0>, [{registered_name,ns_cluster}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030613c4 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_cluster\ny(3) {state}\ny(4) ns_cluster\ny(5) <0.61.0>\n\n0x030613e0 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,10}]}, {heap_size,10946}, {total_heap_size,15127}, {links,[<10870.61.0>]}, {memory,60944}, {message_queue_len,0}, {reductions,13559}, {trap_exit,false}]}, {<10870.64.0>, [{registered_name,ns_config_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04468418 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,ns_config_sup},rest_for_one,[{child,undefined,ns_config_log,{ns_config_log,start_link,[]},transient,10,worker,[]},{child,undefined,ns_config_isasl_sync,{ns_config_isasl_sync,start_link,[]},transient,10,worker,[]},{child,<0.66.0>,ns_config,{ns_config,start_link,[\"priv/config\",ns_config_default]},permanent,10,worker,[ns_config,ns_config_default]},{child,<0.65.0>,ns_config_events,{gen_event,start_link,[{local,ns_config_events}]},permanent,10,worker,[]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},3,10,[],ns_config_sup,[]}\ny(4) ns_config_sup\ny(5) <0.61.0>\n\n0x04468434 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,6765}, {total_heap_size,7142}, {links,[<10870.65.0>,<10870.66.0>,<10870.61.0>]}, {memory,29044}, {message_queue_len,0}, {reductions,1037}, {trap_exit,true}]}, {<10870.65.0>, [{registered_name,ns_config_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00b1c190 (unknown function)\nCP: 0x00b1c194 ()\narity = 3\n proc_lib\n wake_up\n [gen_event,wake_hib,[<0.64.0>,ns_config_events,[{handler,ns_pubsub,#Ref<0.0.0.1476>,{state,#Fun,ok},<0.308.0>},{handler,ns_pubsub,#Ref<0.0.0.1222>,{state,#Fun,undefined},<0.267.0>},{handler,ns_pubsub,#Ref<0.0.0.1062>,{state,#Fun,undefined},<0.230.0>},{handler,ns_port_init,false,{state},false},{handler,menelaus_event,ns_config_events,{state,ns_config_events,[{ip,\"0.0.0.0\"},{port,8091},{approot,\"c:/Program Files/Membase/Server/bin/ns_server/deps/menelaus/priv/public\"},{docroot,\"c:/Program Files/Membase/Server/docs\"}],[{<0.218.0>,#Ref<0.0.0.1284>}]},false},{handler,ns_node_disco_conf_events,false,{state},false},{handler,menelaus_event,ns_config_events,{state,ns_config_events,[{ip,\"0.0.0.0\"},{port,8091},{approot,\"c:/Program Files/Membase/Server/bin/ns_server/deps/menelaus/priv/public\"},{docroot,\"c:/Program Files/Membase/Server/docs\"}],[]},false},{handler,ns_node_disco_conf_events,false,{state},false},{handler,ns_config_log,false,{state,[{rebalance_status,{none,<<76 bytes>>}}]},false},{handler,ns_config_isasl_sync,false,{state,[{\"default\",[]}],\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\",1,\"_admin\",\"_admin\"},false}],[]]]\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,835}, {total_heap_size,835}, {links, [<10870.230.0>,<10870.267.0>,<10870.308.0>, <10870.64.0>]}, {memory,3876}, {message_queue_len,0}, {reductions,430155}, {trap_exit,true}]}, {<10870.66.0>, [{registered_name,ns_config}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04f67dc0 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_config\ny(3) {config,{full,\"priv/config\",undefined,ns_config_default},[[],[{directory,\"c:/Program Files/Membase/Server/config\"},{nodes_wanted,['ns_1@10.2.1.102']},{{node,'ns_1@10.2.1.102',membership},active},{rest,[{port,8091}]},{rest_creds,[{creds,[]}]},{{node,'ns_1@10.2.1.102',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.102',memcached},[{port,11210},{dbdir,\"c:/Program 
Files/Membase/Server/data/ns_1\"},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{memory_quota,3268},{buckets,[{configs,[]}]},{moxi,[{port,11211},{verbosity,[]}]},{port_servers,[{moxi,\"./bin/moxi/moxi\",[\"-Z\",{\"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",[port]},\"-z\",{\"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming\",[{rest,port}]},\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",{\"~s\",[{ns_moxi_sup,rest_user,[]}]}},{\"MOXI_SASL_PLAIN_PWD\",{\"~s\",[{ns_moxi_sup,rest_pass,[]}]}}]},use_stdio,stderr_to_stdout,stream]},{memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\",{\"~B\",[port]},\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",{\"admin=~s;default_bucket_name=default;auto_create=false\",[admin_user]},{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",{\"~s\",[{isasl,path}]}},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]}]},{{node,'ns_1@10.2.1.102',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{alerts,[{email,[]},{email_alerts,false},{email_server,[{user,undefined},{pass,undefined},{addr,undefined},{port,undefined},{encrypt,false}]},{alerts,[server_down,server_unresponsive,server_up,server_joined,server_left,bucket_created,bucket_deleted,bucket_auth_failed]}]},{replication,[{enabled,true}]}]],[[{{node,'ns_1@10.2.1.101',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.102',memcached},[{port,11210},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{otp,[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{cookie,pmqchiglstnppkwf}]},{memory_quota,3268},{{node,'ns_1@10.2.1.102',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.100',membership},active},{rebalance_status,{none,<<76 
bytes>>}},{{node,'ns_1@10.2.1.101',membership},active},{rest_creds,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{creds,[{\"Administrator\",[{password,\"j4958ph\"}]}]}]},{buckets,[{'_vclock',[{'ns_1@10.2.1.100',{9,63461309965}}]},{configs,[{\"default\",[{num_replicas,1},{ram_quota,3426746368},{auth_type,sasl},{sasl_password,[]},{type,membase},{num_vbuckets,1024},{ht_size,3079},{tap_keepalive,0},{tap_noop_interval,20},{max_txn_size,1000},{ht_locks,5},{servers,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{map,[['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefin
ed],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101',
'ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['
ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_
1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1
@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10
.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],
['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101
'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100'
,'ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],[
'ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101']]}]}]}]},{port_servers,[{moxi,\"./bin/moxi/moxi\",[\"-Z\",{\"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",[port]},\"-z\",{\"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming\",[{rest,port}]},\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",{\"~s\",[{ns_moxi_sup,rest_user,[]}]}},{\"MOXI_SASL_PLAIN_PWD\",{\"~s\",[{ns_moxi_sup,rest_pass,[]}]}}]},use_stdio,stderr_to_stdout,stream]},{memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\"
,{\"~B\",[port]},\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",{\"admin=~s;default_bucket_name=default;auto_create=false\",[admin_user]},{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",{\"~s\",[{isasl,path}]}},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]}]},{alerts,[{email,[]},{email_alerts,false},{email_server,[{user,undefined},{pass,undefined},{addr,undefined},{port,undefined},{encrypt,false}]},{alerts,[server_down,server_unresponsive,server_up,server_joined,server_left,bucket_created,bucket_deleted,bucket_auth_failed]}]},{nodes_wanted,[{'_vclock',[{'ns_1@10.2.1.100',{2,63461308289}}]},'ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{rest,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{port,8091}]},{{node,'ns_1@10.2.1.102',membership},active},{{node,'ns_1@10.2.1.100',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.101',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.102',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{moxi,[{port,11211},{verbosity,[]}]},{replication,[{enabled,true}]},{{node,'ns_1@10.2.1.100',memcached},[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307259}}]},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.101',memcached},[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.100',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]}]],ns_config_default}\ny(4) ns_config\ny(5) <0.64.0>\n\n0x04f67ddc Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,15}]}, {heap_size,75025}, {total_heap_size,121393}, {links,[<10870.64.0>]}, {memory,486008}, {message_queue_len,0}, {reductions,355416}, {trap_exit,false}]}, {<10870.129.0>, [{registered_name,disk_log_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00c6b3e0 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,disk_log_sup},simple_one_for_one,[{child,undefined,disk_log,{disk_log,istart_link,[]},temporary,1000,worker,[disk_log]}],{dict,1,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[[<0.253.0>,<0.130.0>]],[],[]}}},4,3600,[],disk_log_sup,[]}\ny(4) disk_log_sup\ny(5) <0.34.0>\n\n0x00c6b3fc Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,21}]}, {heap_size,377}, {total_heap_size,987}, {links,[<10870.253.0>,<10870.34.0>]}, 
{memory,4404}, {message_queue_len,0}, {reductions,5272}, {trap_exit,true}]}, {<10870.130.0>, [{registered_name,disk_log_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00c66cd0 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) disk_log_server\ny(3) {state,[]}\ny(4) disk_log_server\ny(5) <0.34.0>\n\n0x00c66cec Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,25}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<10870.253.0>,<10870.34.0>]}, {memory,10792}, {message_queue_len,0}, {reductions,6194}, {trap_exit,true}]}, {<10870.133.0>, [{registered_name,dets_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00eabaa8 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,dets_sup},simple_one_for_one,[{child,undefined,dets,{dets,istart_link,[]},temporary,30000,worker,[dets]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},4,3600,[],dets_sup,[]}\ny(4) dets_sup\ny(5) <0.34.0>\n\n0x00eabac4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,7}]}, {heap_size,377}, {total_heap_size,987}, {links,[<10870.34.0>]}, {memory,4384}, {message_queue_len,0}, {reductions,720}, {trap_exit,true}]}, {<10870.134.0>, [{registered_name,dets}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00eb500c Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) dets_server\ny(3) {state,40995,[<0.34.0>],[]}\ny(4) dets\ny(5) <0.34.0>\n\n0x00eb5028 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,10}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<10870.34.0>]}, {memory,6824}, {message_queue_len,0}, {reductions,1171}, {trap_exit,true}]}, {<10870.171.0>, [{registered_name,ns_server_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0459c42c Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,ns_server_sup},one_for_one,[{child,<0.309.0>,ns_tick,{ns_tick,start_link,[]},permanent,10,worker,[ns_tick]},{child,<0.308.0>,ns_moxi_sup,{ns_moxi_sup,start_link,[]},permanent,infinity,supervisor,[ns_moxi_sup]},{child,<0.267.0>,ns_bad_bucket_sup,{ns_bucket_sup,start_link,[ns_bad_bucket_sup,#Fun,ns_bad_bucket_worker]},permanent,infinity,supervisor,[ns_bucket_sup]},{child,<0.266.0>,ns_bad_bucket_worker,{work_queue,start_link,[ns_bad_bucket_worker]},permanent,10,worker,[work_queue]},{child,<0.234.0>,ns_mnesia,{ns_mnesia,start_link,[]},permanent,10000,worker,[ns_mnesia]},{child,<0.233.0>,ns_orchestrator,{ns_orchestrator,start_link,[]},permanent,20,worker,[ns_orchestrator]},{child,<0.230.0>,ns_good_bucket_sup,{ns_bucket_sup,start_link,[ns_good_bucket_sup,#Fun,ns_good_bucket_worker]},permanent,infinity,supervisor,[ns_bucket_sup]},{child,<0.229.0>,ns_good_bucket_worker,{work_queue,start_link,[ns_good_bucket_worker]},permanent,10,worker,[work_queue]},{child,<0.228.0>,ns_stats_event,{gen_event,start_link,[{local,ns_stats_event}]},permanent,10,worker,dynamic},{child,<0.227.0>,ns_tick_event,{gen_event,start_link,[{local,ns_tick_event}]},permanent,10,worker,dynamic},{child,<0.221.0>,ns_port_sup,{ns_port_sup,start_link,[]},permanent,10,worker,[ns_port_sup]},{child,<0.216.0>,menelaus,{menelaus_app,start_subapp,[]},permanent,infinity,supervisor,[menelaus_app]},{child,<0.200.0>,ns_doctor,{ns_doctor,start_link,[]},permanent,10,worker,[ns_doctor]},{child,<0.199.0>,ns_heart,{ns_heart,start_link,[]},permanent,10,worker,[ns_heart]},{child,<0.177.0>,ns_node_disco_sup,{ns_node_disco_sup,start_link,[]},permanent,infinity,supervisor,[ns_node_disco_sup]},{child,<0.174.0>,ns_mail_sup,{ns_mail_sup,start_link,[]},permanent,infinity,supervisor,[ns_mail_sup]},{child,<0.173.0>,ns_log_events,{gen_event,start_link,[{local,ns_log_events}]},permanent,10,worker,dynamic},{child,<0.172.0>,ns_log,{ns_log,start_link,[]},permanent,10,worker,[ns_log]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_server_sup,[]}\ny(4) ns_server_sup\ny(5) <0.61.0>\n\n0x0459c448 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,46368}, {total_heap_size,121393}, {links, [<10870.216.0>,<10870.229.0>,<10870.266.0>, <10870.308.0>,<10870.309.0>,<10870.267.0>, <10870.233.0>,<10870.234.0>,<10870.230.0>, <10870.227.0>,<10870.228.0>,<10870.221.0>, <10870.174.0>,<10870.199.0>,<10870.200.0>, <10870.177.0>,<10870.172.0>,<10870.173.0>, <10870.61.0>]}, {memory,486368}, {message_queue_len,0}, {reductions,55933}, {trap_exit,true}]}, {<10870.172.0>, [{registered_name,ns_log}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x053b2e28 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_log\ny(3) {state,[{log_entry,{1294,89069,648003},'ns_1@10.2.1.102',ns_node_disco,3,\"Initial otp cookie generated: ~p\",[jstohszpzysoucmg],info},{log_entry,{1294,89069,695002},'ns_1@10.2.1.102',menelaus_app,1,\"Membase Server has started on web port ~p on node ~p.\",[8091,'ns_1@10.2.1.102'],info},{log_entry,{1294,89089,429002},'ns_1@10.2.1.102',menelaus_app,1,\"Membase Server has started on web port ~p on node 
~p.\",[8091,'ns_1@10.2.1.102'],info},{log_entry,{1294,89089,585005},'ns_1@10.2.1.102',ns_node_disco,4,\"Node ~p saw that node ~p came up.\",['ns_1@10.2.1.102','ns_1@10.2.1.101'],info},{log_entry,{1294,89089,773024},'ns_1@10.2.1.102',ns_cluster,3,\"Node ~s joined cluster\",['ns_1@10.2.1.102'],info},{log_entry,{1294,89089,788403},'ns_1@10.2.1.100',ns_node_disco,4,\"Node ~p saw that node ~p came up.\",['ns_1@10.2.1.100','ns_1@10.2.1.102'],info},{log_entry,{1294,89090,304213},'ns_1@10.2.1.101',ns_node_disco,4,\"Node ~p saw that node ~p came up.\",['ns_1@10.2.1.101','ns_1@10.2.1.102'],info},{log_entry,{1294,89092,596400},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[]],info},{log_entry,{1294,89098,696400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[wait_for_memcached_failed],info},{log_entry,{1294,89176,806003},'ns_1@10.2.1.102',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.102',1],info},{log_entry,{1294,89428,372401},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n (repeated ~p times)\",[wait_for_memcached_failed,2],info},{log_entry,{1294,89428,372401},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n (repeated ~p times)\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[],3],info},{log_entry,{1294,89842,961400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[stopped],info},{log_entry,{1294,89861,634400},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[]],info},{log_entry,{1294,90758,702400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[stopped],info},{log_entry,{1294,90765,67400},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[]],info},{log_entry,{1294,90778,796210},'ns_1@10.2.1.101',ns_memcached,4,\"Control connection to memcached on ~p disconnected: ~p\",['ns_1@10.2.1.101',{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]}],info},{log_entry,{1294,90778,858400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[{{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},{gen_server,call,[{'ns_memcached-default','ns_1@10.2.1.101'},{delete_vbucket,633},30000]}}],info},{log_entry,{1294,90781,885212},'ns_1@10.2.1.101',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.101',0],info},{log_entry,{1294,90813,53400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason ~p~n\",[{{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},{gen_server,call,[{'ns_memcached-default','ns_1@10.2.1.101'},{delete_vbucket,65},30000]}}],info},{log_entry,{1294,90835,18400},'ns_1@10.2.1.100',ns_orchestrator,2,\"Rebalance exited with reason 
~p~n\",[{{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},{gen_server,call,[{'ns_memcached-default','ns_1@10.2.1.101'},{delete_vbucket,70},30000]}}],info},{log_entry,{1294,91107,927211},'ns_1@10.2.1.101',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds. (repeated ~p times)\",[\"default\",'ns_1@10.2.1.101',0,9],info},{log_entry,{1294,91107,927211},'ns_1@10.2.1.101',ns_memcached,4,\"Control connection to memcached on ~p disconnected: ~p (repeated ~p times)\",['ns_1@10.2.1.101',{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},9],info},{log_entry,{1294,91108,378401},'ns_1@10.2.1.100',ns_orchestrator,4,\"Starting rebalance, KeepNodes = ~p, EjectNodes = ~p~n (repeated ~p times)\",[['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],[],2],info},{log_entry,{1294,91120,672210},'ns_1@10.2.1.101',ns_memcached,4,\"Control connection to memcached on ~p disconnected: ~p\",['ns_1@10.2.1.101',{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]}],info},{log_entry,{1294,91120,813213},'ns_1@10.2.1.101',ns_memcached,1,\"Bucket ~p loaded on node ~p in ~p seconds.\",[\"default\",'ns_1@10.2.1.101',0],info}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},undefined,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}\ny(4) ns_log\ny(5) <0.171.0>\n\n0x053b2e44 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,5}]}, {heap_size,6765}, {total_heap_size,13530}, {links,[<10870.57.0>,<10870.171.0>]}, {memory,54576}, {message_queue_len,0}, {reductions,6688}, {trap_exit,false}]}, {<10870.173.0>, [{registered_name,ns_log_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00b1c190 (unknown function)\nCP: 0x00b1c194 ()\narity = 3\n proc_lib\n wake_up\n [gen_event,wake_hib,[<0.171.0>,ns_log_events,[{handler,ns_mail_log,false,{state},false}],[]]]\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,8}]}, {heap_size,34}, {total_heap_size,34}, {links,[<10870.171.0>]}, {memory,572}, {message_queue_len,0}, {reductions,5131}, {trap_exit,true}]}, {<10870.174.0>, [{registered_name,ns_mail_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030263c0 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_mail_sup},one_for_all,[{child,undefined,ns_mail_log,{ns_mail_log,start_link,[]},transient,10,worker,[ns_mail_log]},{child,<0.175.0>,ns_mail,{ns_mail,start_link,[]},permanent,10,worker,[ns_mail]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_mail_sup,[]}\ny(4) ns_mail_sup\ny(5) <0.171.0>\n\n0x030263dc Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, 
[{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<10870.175.0>,<10870.171.0>]}, {memory,2896}, {message_queue_len,0}, {reductions,664}, {trap_exit,true}]}, {<10870.175.0>, [{registered_name,ns_mail}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ebf488 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_mail\ny(3) empty_state\ny(4) ns_mail\ny(5) <0.174.0>\n\n0x00ebf4a4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.174.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,27}, {trap_exit,true}]}, {<10870.177.0>, [{registered_name,ns_node_disco_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00eb0170 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_node_disco_sup},rest_for_one,[{child,<0.191.0>,ns_config_rep,{ns_config_rep,start_link,[]},permanent,10,worker,[ns_config_rep]},{child,undefined,ns_node_disco_conf_events,{ns_node_disco_conf_events,start_link,[]},transient,10,worker,[]},{child,undefined,ns_node_disco_log,{ns_node_disco_log,start_link,[]},transient,10,worker,[]},{child,<0.179.0>,ns_node_disco,{ns_node_disco,start_link,[]},permanent,10,worker,[]},{child,<0.178.0>,ns_node_disco_events,{gen_event,start_link,[{local,ns_node_disco_events}]},permanent,10,worker,[]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_node_disco_sup,[]}\ny(4) ns_node_disco_sup\ny(5) <0.171.0>\n\n0x00eb018c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,4}]}, {heap_size,233}, {total_heap_size,610}, {links, [<10870.179.0>,<10870.191.0>,<10870.178.0>, <10870.171.0>]}, {memory,2936}, {message_queue_len,0}, {reductions,805}, {trap_exit,true}]}, {<10870.178.0>, [{registered_name,ns_node_disco_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00b1c190 (unknown function)\nCP: 0x00b1c194 ()\narity = 3\n proc_lib\n wake_up\n [gen_event,wake_hib,[<0.177.0>,ns_node_disco_events,[{handler,menelaus_event,ns_node_disco_events,{state,ns_node_disco_events,undefined,[{<0.218.0>,#Ref<0.0.0.1286>}]},false},{handler,ns_node_disco_rep_events,false,{state},false},{handler,ns_node_disco_log,false,{state},false}],[]]]\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,66}, {total_heap_size,66}, {links,[<10870.177.0>]}, {memory,740}, {message_queue_len,0}, {reductions,111}, {trap_exit,true}]}, {<10870.179.0>, [{registered_name,ns_node_disco}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x05a626b4 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_node_disco\ny(3) {state,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'],{interval,#Ref<0.0.0.836>}}\ny(4) ns_node_disco\ny(5) 
<0.177.0>\n\n0x05a626d0 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,184}]}, {heap_size,46368}, {total_heap_size,53133}, {links,[<10870.177.0>,<10870.57.0>]}, {memory,212988}, {message_queue_len,0}, {reductions,243491}, {trap_exit,false}]}, {<10870.182.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{inet_tcp_dist,do_setup,6}}, {backtrace, <<"Program counter: 0x0495fd74 (dist_util:con_loop/9 + 72)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04eff7d4 Return addr 0x00b1c194 ()\ny(0) []\ny(1) #Fun\ny(2) #Fun\ny(3) {tick,20972,9223,2,2}\ny(4) normal\ny(5) 'ns_1@10.2.1.102'\ny(6) {net_address,{{10,2,1,100},21100},\"10.2.1.100\",tcp,inet}\ny(7) #Port<0.3247>\ny(8) 'ns_1@10.2.1.100'\ny(9) <0.21.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,16}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<10870.21.0>,#Port<10870.3247>]}, {memory,12240}, {message_queue_len,0}, {reductions,4087}, {trap_exit,false}]}, {<10870.186.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{inet_tcp_dist,do_accept,6}}, {backtrace, <<"Program counter: 0x0495fd74 (dist_util:con_loop/9 + 72)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bd9670 Return addr 0x00b1c194 ()\ny(0) []\ny(1) #Fun\ny(2) #Fun\ny(3) {tick,10337,6347,2,2}\ny(4) normal\ny(5) 'ns_1@10.2.1.102'\ny(6) {net_address,{{10,2,1,101},56618},\"10.2.1.101\",tcp,inet}\ny(7) #Port<0.3249>\ny(8) 'ns_1@10.2.1.101'\ny(9) <0.21.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,15}]}, {heap_size,233}, {total_heap_size,610}, {links,[<10870.21.0>,#Port<10870.3249>]}, {memory,2836}, {message_queue_len,0}, {reductions,3720}, {trap_exit,false}]}, {<10870.191.0>, [{registered_name,ns_config_rep}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0559a91c Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_config_rep\ny(3) {state}\ny(4) ns_config_rep\ny(5) <0.177.0>\n\n0x0559a938 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,26}]}, {heap_size,46368}, {total_heap_size,75025}, {links,[<10870.177.0>]}, {memory,300536}, {message_queue_len,0}, {reductions,42693}, {trap_exit,false}]}, {<10870.199.0>, [{registered_name,ns_heart}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00c5d110 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_heart\ny(3) [{replication,[{\"default\",0.000000e+000}]},{system_memory_data,[{total_memory,4284698624},{free_memory,933093376},{system_total_memory,4284698624}]},{statistics,[{wall_clock,{2210316,0}},{context_switches,{223166,0}},{garbage_collection,{63145,320518908,0}},{io,{{input,23485359},{output,21305805}}},{reductions,{129761022,609670}},{run_queue,0},{runtime,{12058,46}}]}]\ny(4) ns_heart\ny(5) <0.171.0>\n\n0x00c5d12c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,314}]}, {heap_size,6765}, {total_heap_size,53133}, 
{links,[<10870.171.0>,<10870.57.0>]}, {memory,212988}, {message_queue_len,0}, {reductions,3957929}, {trap_exit,false}]}, {<10870.200.0>, [{registered_name,ns_doctor}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00c63ad0 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_doctor\ny(3) {state,{dict,3,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[['ns_1@10.2.1.100',{last_heard,{1294,91286,984000}},{active_buckets,[\"default\"]},{memory,[{total,22125376},{processes,13685396},{processes_used,13666276},{system,8439980},{atom,560789},{atom_used,558343},{binary,181288},{code,4582580},{ets,1693668}]},{cluster_compatibility_version,1},{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{inets,\"5.2\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{system_arch,\"windows\"},{wall_clock,3299},{memory_data,{4284698624,4184846336,{<10833.299.0>,6656756}}},{disk_data,[{\"C:\\\",48162864,60},{\"D:\\\",51279476,0},{\"G:\\\",34724465,17}]},{replication,[{\"default\",5.000000e-001}]},{system_memory_data,[{total_memory,4284698624},{free_memory,92594176},{system_total_memory,4284698624}]},{statistics,[{wall_clock,{3290395,0}},{context_switches,{966801,0}},{garbage_collection,{172672,1926558401,0}},{io,{{input,101363188},{output,50652200}}},{reductions,{792350692,811632}},{run_queue,0},{runtime,{46067,32}}]}]],[['ns_1@10.2.1.101',{last_heard,{1294,91286,641000}},{active_buckets,[\"default\"]},{memory,[{total,19416288},{processes,11039004},{processes_used,11028940},{system,8377284},{atom,559813},{atom_used,556363},{binary,246232},{code,4551541},{ets,1606372}]},{cluster_compatibility_version,1},{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{inets,\"5.2\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{system_arch,\"windows\"},{wall_clock,3229},{memory_data,{4284698624,4210270208,{<11129.387.0>,5385512}}},{disk_data,[{\"C:\\\",46243100,46},{\"D:\\\",51809624,0},{\"G:\\\",33929248,18}]},{replication,[{\"default\",0.000000e+000}]},{system_memory_data,[{total_memory,4284698624},{free_memory,87326720},{system_total_memory,4284698624}]},{statistics,[{wall_clock,{3222470,1575}},{context_switches,{571544,0}},{garbage_collection,{128134,1245695829,0}},{io,{{input,79617831},{output,38541094}}},{reductions,{396374397,3168310}},{run_queue,0},{runtime,{30981,265}}]}]],[['ns_1@10.2.1.102',{last_heard,{1294,91286,407001}},{active_buckets,[\"default\"]},{memory,[{total,16515376},{processes,8680636},{processes_used,8667668},{system,7834740},{atom,541077},{atom_used,528868},{binary,326616},{code,4280811},{ets,1273900}]},{cluster_compatibility_version,1},{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{system_arch,\"windows\"},{wall_clock,2217},{memory_data,{4284698624,3351322624,{<0.307.0>,4114268}}},{disk_data,[{\"C:\\\",49423972,41},{\"D:\\\",52797620,0},{\"G:\\\",34724465,17}]},{replication,[{\"default\",0.000000e+000}]},{system_memory_data,[{total_memory,4284698624},{free_memory,933093376},{system_total_memory,4284698624}]},{statistics,[{wall_clock,{2210316,0}},{context_switches,{223166,0}},{garbage_collection,{63145,320518908,0}},{io,{{input,23485359},{output,21305805}}},{reductions,{129761022,609670}},{run_queue,0},{runtime,{12058,46
}}]}]],[],[],[],[],[],[],[],[],[],[],[],[],[]}}}}\ny(4) ns_doctor\ny(5) <0.171.0>\n\n0x00c63aec Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,654}]}, {heap_size,6765}, {total_heap_size,9349}, {links,[<10870.171.0>,<10870.57.0>]}, {memory,37852}, {message_queue_len,0}, {reductions,279031}, {trap_exit,false}]}, {<10870.216.0>, [{registered_name,menelaus_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bd7c10 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,menelaus_sup},one_for_one,[{child,<0.507.0>,hot_keys_keeper,{hot_keys_keeper,start_link,[]},permanent,5000,worker,dynamic},{child,undefined,menelaus_event,{menelaus_event,start_link,[]},transient,5000,worker,dynamic},{child,<0.217.0>,menelaus_web,{menelaus_web,start_link,[]},permanent,5000,worker,dynamic}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},10,10,[{1294,89174,559001},{1294,89169,552002},{1294,89164,544002}],menelaus_sup,[]}\ny(4) menelaus_sup\ny(5) <0.171.0>\n\n0x04bd7c2c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,4181}, {total_heap_size,32838}, {links,[<10870.217.0>,<10870.507.0>,<10870.171.0>]}, {memory,131828}, {message_queue_len,0}, {reductions,4564}, {trap_exit,true}]}, {<10870.217.0>, [{registered_name,menelaus_web}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ed9810 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mochiweb_socket_server\ny(3) {mochiweb_socket_server,8091,#Fun,{local,menelaus_web},2047,{0,0,0,0},#Port<0.3254>,<0.281.0>,128}\ny(4) menelaus_web\ny(5) <0.216.0>\n\n0x00ed982c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,377}, {total_heap_size,987}, {links, [<10870.216.0>,<10870.218.0>,<10870.281.0>, #Port<10870.3254>]}, {memory,4444}, {message_queue_len,0}, {reductions,335}, {trap_exit,true}]}, {<10870.218.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x04348d18 (menelaus_web:handle_streaming/4 + 196)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0536942c Return addr 0x04347108 (menelaus_web:loop/3 + 12136)\ny(0) {struct,[{buckets,[{struct,[{name,<<7 bytes>>},{nodeLocator,vbucket},{saslPassword,<<0 bytes>>},{nodes,[{struct,[{replication,5.000000e-001},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]},{struct,[{replication,0.000000e+000},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]},{struct,[{replication,0.000000e+000},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 
bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]}]},{vBucketServerMap,{struct,[{hashAlgorithm,<<3 bytes>>},{numReplicas,1},{serverList,[<<16 bytes>>,<<16 bytes>>,<<16 bytes>>]},{vBucketMap,[[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2
,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1]]}]}}]}]}]}\ny(1) {struct,[{buckets,[{struct,[{name,<<7 bytes>>},{nodeLocator,vbucket},{saslPassword,<<0 bytes>>},{nodes,[{struct,[{replication,5.000000e-001},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]},{struct,[{replication,0.000000e+000},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 
bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]},{struct,[{replication,0.000000e+000},{clusterMembership,<<6 bytes>>},{status,<<7 bytes>>},{hostname,<<15 bytes>>},{clusterCompatibility,1},{version,<<6 bytes>>},{os,<<7 bytes>>},{ports,{struct,[{proxy,11211},{direct,11210}]}}]}]},{vBucketServerMap,{struct,[{hashAlgorithm,<<3 bytes>>},{numReplicas,1},{serverList,[<<16 bytes>>,<<16 bytes>>,<<16 bytes>>]},{vBucketMap,[[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],
[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,0],[2,1],[2,1],[2,0],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[2,-1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1],[0,1]]}]}}]}]}]}\ny(2) {mochiweb_response,{mochiweb_request,#Port<0.3380>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},{3,{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept\",{'Accept',\"*/*\"},nil,nil},{\"host\",{'Host',\"127.0.0.1:8091\"},nil,nil}}}},200,{6,{\"pragma\",{\"Pragma\",\"no-cache\"},{\"cache-control\",{\"Cache-Control\",\"no-cache no-store 
max-age=0\"},nil,{\"content-type\",{\"Content-Type\",\"application/json; charset=utf-8\"},nil,{\"date\",{\"Date\",\"Mon, 03 Jan 2011 21:11:29 GMT\"},nil,nil}}},{\"server\",{\"Server\",\"Membase Server 1.6.5r\"},nil,{\"transfer-encoding\",{\"Transfer-Encoding\",\"chunked\"},nil,nil}}}}}\ny(3) {mochiweb_request,#Port<0.3380>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},{3,{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept\",{'Accept',\"*/*\"},nil,nil},{\"host\",{'Host',\"127.0.0.1:8091\"},nil,nil}}}}\ny(4) #Fun\n\n0x05369444 Return addr 0x043539f8 (mochiweb_http:headers/5 + 680)\ny(0) []\ny(1) []\ny(2) []\ny(3) []\ny(4) {mochiweb_request,#Port<0.3380>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},{3,{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept\",{'Accept',\"*/*\"},nil,nil},{\"host\",{'Host',\"127.0.0.1:8091\"},nil,nil}}}}\ny(5) Catch 0x04347118 (menelaus_web:loop/3 + 12152)\n\n0x05369460 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) #Fun\ny(1) []\ny(2) []\ny(3) {mochiweb_request,#Port<0.3380>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},{3,{\"authorization\",{'Authorization',\"Basic QWRtaW5pc3RyYXRvcjpqNDk1OHBo\"},{\"accept\",{'Accept',\"*/*\"},nil,nil},{\"host\",{'Host',\"127.0.0.1:8091\"},nil,nil}}}}\n\n0x05369474 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,29}]}, {heap_size,75025}, {total_heap_size,150050}, {links,[<10870.217.0>,#Port<10870.3380>]}, {memory,600736}, {message_queue_len,0}, {reductions,9767093}, {trap_exit,false}]}, {<10870.221.0>, [{registered_name,ns_port_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04b826d4 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_port_sup},one_for_one,[{child,<0.225.0>,{memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\",\"11210\",\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",\"admin=_admin;default_bucket_name=default;auto_create=false\",[]],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]},{supervisor_cushion,start_link,[memcached,5000,ns_port_server,start_link,[memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\",\"11210\",\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",\"admin=_admin;default_bucket_name=default;auto_create=false\",[]],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",\"c:/Program 
Files/Membase/Server/data/ns_1/isasl.pw\"},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]]]},permanent,10,worker,[ns_port_server]},{child,<0.223.0>,{moxi,\"./bin/moxi/moxi\",[\"-Z\",\"port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",\"-z\",\"url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming\",\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",[]],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",\"Administrator\"},{\"MOXI_SASL_PLAIN_PWD\",\"j4958ph\"}]},use_stdio,stderr_to_stdout,stream]},{supervisor_cushion,start_link,[moxi,5000,ns_port_server,start_link,[moxi,\"./bin/moxi/moxi\",[\"-Z\",\"port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",\"-z\",\"url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming\",\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",[]],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",\"Administrator\"},{\"MOXI_SASL_PLAIN_PWD\",\"j4958ph\"}]},use_stdio,stderr_to_stdout,stream]]]},permanent,10,worker,[ns_port_server]},{child,undefined,ns_port_init,{ns_port_init,start_link,[]},transient,10,worker,[]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_port_sup,[]}\ny(4) ns_port_sup\ny(5) <0.171.0>\n\n0x04b826f0 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,46368}, {total_heap_size,75025}, {links,[<10870.223.0>,<10870.225.0>,<10870.171.0>]}, {memory,300576}, {message_queue_len,0}, {reductions,7425}, {trap_exit,true}]}, {<10870.223.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x03047f10 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor_cushion\ny(3) {state,moxi,5000,{1294,89089,429003},<0.224.0>}\ny(4) <0.223.0>\ny(5) <0.221.0>\n\n0x03047f2c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,1597}, {total_heap_size,1597}, {links,[<10870.221.0>,<10870.224.0>]}, {memory,6844}, {message_queue_len,0}, {reductions,149}, {trap_exit,true}]}, {<10870.224.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00e86a14 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_port_server\ny(3) {state,#Port<0.3255>,moxi,{[\"2011-01-03 13:11:29: (cproxy_config.c.325) env: MOXI_SASL_PLAIN_PWD (7)\",\"2011-01-03 13:11:29: (cproxy_config.c.316) env: MOXI_SASL_PLAIN_USR (13)\"],[empty]},undefined,[],0}\ny(4) <0.224.0>\ny(5) <0.223.0>\n\n0x00e86a30 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, 
[{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,2584}, {total_heap_size,5168}, {links,[<10870.223.0>,#Port<10870.3255>]}, {memory,21128}, {message_queue_len,0}, {reductions,305}, {trap_exit,true}]}, {<10870.225.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0310ca98 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor_cushion\ny(3) {state,memcached,5000,{1294,89089,429004},<0.226.0>}\ny(4) <0.225.0>\ny(5) <0.221.0>\n\n0x0310cab4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,987}, {total_heap_size,987}, {links,[<10870.221.0>,<10870.226.0>]}, {memory,4404}, {message_queue_len,0}, {reductions,55}, {trap_exit,true}]}, {<10870.226.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x053bc044 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_port_server\ny(3) {state,#Port<0.3256>,memcached,{[\"Suspend eq_tapq:anon_1 for 1.00 secs\",\"Suspend eq_tapq:anon_1 for 1.00 secs\"],[\"Suspend eq_tapq:anon_1 for 1.00 secs\"]},undefined,[],0}\ny(4) <0.226.0>\ny(5) <0.225.0>\n\n0x053bc060 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,31}]}, {heap_size,2584}, {total_heap_size,4181}, {links,[<10870.225.0>,#Port<10870.3256>]}, {memory,17180}, {message_queue_len,0}, {reductions,18990}, {trap_exit,true}]}, {<10870.227.0>, [{registered_name,ns_tick_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00e34db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bee3a8 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,ns_pubsub,#Ref<0.0.0.1225>,{state,#Fun,ignored},<0.268.0>}]\ny(3) ns_tick_event\ny(4) <0.171.0>\n\n0x04bee3c0 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,117}]}, {heap_size,610}, {total_heap_size,987}, {links,[<10870.171.0>,<10870.268.0>]}, {memory,4404}, {message_queue_len,0}, {reductions,24441}, {trap_exit,true}]}, {<10870.228.0>, [{registered_name,ns_stats_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00e34db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00c4d51c Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,ns_pubsub,#Ref<0.0.0.1474>,{state,#Fun,ignored},<0.269.0>}]\ny(3) ns_stats_event\ny(4) <0.171.0>\n\n0x00c4d534 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,704}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<10870.171.0>,<10870.269.0>]}, {memory,10792}, {message_queue_len,0}, {reductions,64076}, {trap_exit,true}]}, {<10870.229.0>, [{registered_name,ns_good_bucket_worker}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 
0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00c6ba70 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) work_queue\ny(3) []\ny(4) ns_good_bucket_worker\ny(5) <0.171.0>\n\n0x00c6ba8c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,233}, {total_heap_size,610}, {links,[<10870.171.0>]}, {memory,2876}, {message_queue_len,0}, {reductions,167}, {trap_exit,false}]}, {<10870.230.0>, [{registered_name,ns_good_bucket_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x030efb50 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_good_bucket_sup},one_for_one,[{child,<0.342.0>,{ns_memcached,\"default\"},{ns_memcached,start_link,[\"default\"]},permanent,86400000,worker,[ns_memcached]},{child,<0.231.0>,{ns_vbm_sup,\"default\"},{ns_vbm_sup,start_link,[\"default\"]},permanent,1000,worker,[ns_vbm_sup]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},3,10,[{1294,89094,952000}],ns_bucket_sup,{ns_good_bucket_sup,#Fun,ns_good_bucket_worker}}\ny(4) ns_good_bucket_sup\ny(5) <0.171.0>\n\n0x030efb6c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,17711}, {total_heap_size,17711}, {links, [<10870.171.0>,<10870.231.0>,<10870.342.0>, <10870.65.0>]}, {memory,71340}, {message_queue_len,0}, {reductions,1557}, {trap_exit,true}]}, {<10870.231.0>, [{registered_name,'ns_vbm_sup-default'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bed088 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,'ns_vbm_sup-default'},one_for_one,[{child,<0.2984.0>,{child_id,[634,631,630,627,626,623,622,619,618,615,614,611,610,607,606,603,602,599,598,595,594,591,590,587,586,583,582,579,578,575,574,571,570,567,566,563,562,559,558,555,554,551,550,547,546,543,542,539,538,535,534,531,530,527,526,523,522,519,518,515,514,70,69,66,65,62,61,58,57,54,53],'ns_1@10.2.1.100'},{ns_port_server,start_link,[vbucketmigrator,\"./bin/vbucketmigrator/vbucketmigrator\",[\"-e\",\"-a\",\"default\",\"-h\",\"10.2.1.102:11210\",\"-d\",\"10.2.1.100:11210\",\"-A\",\"-v\",\"-b\",\"634\",\"-b\",\"631\",\"-b\",\"630\",\"-b\",\"627\",\"-b\",\"626\",\"-b\",\"623\",\"-b\",\"622\",\"-b\",\"619\",\"-b\",\"618\",\"-b\",\"615\",\"-b\",\"614\",\"-b\",\"611\",\"-b\",\"610\",\"-b\",\"607\",\"-b\",\"606\",\"-b\",\"603\",\"-b\",\"602\",\"-b\",\"599\",\"-b\",\"598\",\"-b\",\"595\",\"-b\",\"594\",\"-b\",\"591\",\"-b\",\"590\",\"-b\",\"587\",\"-b\",\"586\",\"-b\",\"583\",\"-b\",\"582\",\"-b\",\"579\",\"-b\",\"578\",\"-b\",\"575\",\"-b\",\"574\",\"-b\",\"571\",\"-b\",\"570\",\"-b\",\"567\",\"-b\",\"566\",\"-b\",\"563\",\"-b\",\"562\",\"-b\",\"559\",\"-b\",\"558\",\"-b\",\"555\",\"-b\",\"554\",\"-b\",\"551\",\"-b\",\"550\",\"-b\",\"547\",\"-b\",\"546\",\"-b\",\"543\",\"-b\",\"542\",\"-b\",\"539\",\"-b\",\"538\",\"-b\",\"535\",\"-b\",\"534\",\"-b\",\"531\",\"-b\",\"530\",\"-b\",\"527\",\"-b\",\"526\",\"-b\",\"523\",\"-b\",\"522\",\"-b\",\"519\",\"-b\",\"518\",\"-b\",\"515\",\"-b\",\"514\",\"-b\",\"70\",\"-b\",\"69\",\"-b\",\"66\",\"-b\",\"65\",\"-b\",\"62\",\"-b\",\"61\",\"-b\",\"58\",\"-b\",\"57\",\"-b\",\"54\",\"-b\",\"53\"],[use_stdio,stderr_to_stdout,{write_data,[[],\"\\n\"]}]]},permanent,10,worker,[ns_port_server]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_vbm_sup,[]}\ny(4) 'ns_vbm_sup-default'\ny(5) <0.230.0>\n\n0x04bed0a4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,16}]}, {heap_size,987}, {total_heap_size,7752}, {links,[<10870.230.0>,<10870.2984.0>]}, {memory,31464}, {message_queue_len,0}, {reductions,35169}, {trap_exit,true}]}, {<10870.233.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03f1ac8c (misc:wait_for_process/2 + 104)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0310e178 Return addr 0x03f1da10 (misc:'-start_singleton/4-fun-0-'/2 + 56)\ny(0) []\ny(1) #Ref<0.0.0.1067>\ny(2) infinity\n\n0x0310e188 Return addr 0x00b1c194 ()\ny(0) <10833.110.0>\ny(1) ns_orchestrator\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.171.0>]}, {memory,1348}, {message_queue_len,0}, {reductions,6}, {trap_exit,false}]}, {<10870.234.0>, [{registered_name,ns_mnesia}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00e841a8 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_mnesia\ny(3) {state}\ny(4) ns_mnesia\ny(5) <0.171.0>\n\n0x00e841c4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,3}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<10870.243.0>,<10870.171.0>]}, 
{memory,10792}, {message_queue_len,0}, {reductions,1477}, {trap_exit,true}]}, {<10870.235.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03f1de58 (misc:'-wait_for_process/2-fun-0-'/3 + 112)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00eae064 Return addr 0x00b1c194 ()\ny(0) []\ny(1) <0.233.0>\ny(2) #Ref<0.0.0.1067>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.110.0>]}, {memory,1364}, {message_queue_len,0}, {reductions,13}, {trap_exit,true}]}, {<10870.237.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0326fdb8 (application_master:main_loop/2 + 32)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00e828b4 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) {state,<0.238.0>,{appl_data,mnesia,[mnesia_dumper_load_regulator,mnesia_event,mnesia_fallback,mnesia_controller,mnesia_kernel_sup,mnesia_late_loader,mnesia_locker,mnesia_monitor,mnesia_recover,mnesia_substr,mnesia_sup,mnesia_tm],undefined,{mnesia_sup,[]},[mnesia,mnesia_backup,mnesia_bup,mnesia_checkpoint,mnesia_checkpoint_sup,mnesia_controller,mnesia_dumper,mnesia_event,mnesia_frag,mnesia_frag_hash,mnesia_frag_old_hash,mnesia_index,mnesia_kernel_sup,mnesia_late_loader,mnesia_lib,mnesia_loader,mnesia_locker,mnesia_log,mnesia_monitor,mnesia_recover,mnesia_registry,mnesia_schema,mnesia_snmp_hook,mnesia_snmp_sup,mnesia_subscr,mnesia_sup,mnesia_sp,mnesia_text,mnesia_tm],[],infinity,infinity},[],0,<0.31.0>}\ny(2) <0.7.0>\n\n0x00e828c4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<10870.7.0>,<10870.238.0>]}, {memory,6844}, {message_queue_len,0}, {reductions,81}, {trap_exit,true}]}, {<10870.238.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, <<"Program counter: 0x032710fc (application_master:loop_it/4 + 40)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00eb1280 Return addr 0x00b1c194 ()\ny(0) {normal,[]}\ny(1) mnesia_sup\ny(2) <0.239.0>\ny(3) <0.237.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.237.0>,<10870.239.0>]}, {memory,1328}, {message_queue_len,0}, {reductions,32}, {trap_exit,true}]}, {<10870.239.0>, [{registered_name,mnesia_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bd8578 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,mnesia_sup},one_for_all,[{child,<0.241.0>,mnesia_kernel_sup,{mnesia_kernel_sup,start,[]},permanent,infinity,supervisor,[mnesia_kernel_sup,supervisor]},{child,<0.240.0>,mnesia_event,{mnesia_sup,start_event,[]},permanent,30000,worker,[mnesia_event,gen_event]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,3600,[],mnesia_sup,[[]]}\ny(4) mnesia_sup\ny(5) <0.238.0>\n\n0x04bd8594 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, 
{heap_size,377}, {total_heap_size,754}, {links,[<10870.240.0>,<10870.241.0>,<10870.238.0>]}, {memory,3492}, {message_queue_len,0}, {reductions,198}, {trap_exit,true}]}, {<10870.240.0>, [{registered_name,mnesia_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00e34db4 (gen_event:fetch_msg/5 + 44)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bd254c Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) false\ny(1) []\ny(2) [{handler,mnesia_event,false,{state,[],false,[]},false}]\ny(3) mnesia_event\ny(4) <0.239.0>\n\n0x04bd2564 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<10870.239.0>,<10870.243.0>]}, {memory,6844}, {message_queue_len,0}, {reductions,398}, {trap_exit,true}]}, {<10870.241.0>, [{registered_name,mnesia_kernel_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bdad68 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,mnesia_kernel_sup},one_for_all,[{child,<0.250.0>,mnesia_late_loader,{mnesia_late_loader,start,[]},permanent,3000,worker,[mnesia_late_loader,mnesia_monitor,proc_lib]},{child,<0.249.0>,mnesia_controller,{mnesia_controller,start,[]},permanent,3000,worker,[mnesia_controller,gen_server]},{child,<0.248.0>,mnesia_snmp_sup,{mnesia_snmp_sup,start,[]},permanent,infinity,supervisor,[mnesia_snmp_sup,supervisor]},{child,<0.247.0>,mnesia_checkpoint_sup,{mnesia_checkpoint_sup,start,[]},permanent,infinity,supervisor,[mnesia_checkpoint_sup,supervisor]},{child,<0.246.0>,mnesia_tm,{mnesia_tm,start,[]},permanent,30000,worker,[mnesia_tm,mnesia_monitor,proc_lib]},{child,<0.245.0>,mnesia_recover,{mnesia_recover,start,[]},permanent,180000,worker,[mnesia_recover,gen_server]},{child,<0.244.0>,mnesia_locker,{mnesia_locker,start,[]},permanent,3000,worker,[mnesia_locker,mnesia_monitor,proc_lib]},{child,<0.243.0>,mnesia_subscr,{mnesia_subscr,start,[]},permanent,3000,worker,[mnesia_subscr,gen_server]},{child,<0.242.0>,mnesia_monitor,{mnesia_monitor,start,[]},permanent,3000,worker,[mnesia_monitor,gen_server]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,86400000,[],mnesia_kernel_sup,[]}\ny(4) mnesia_kernel_sup\ny(5) <0.239.0>\n\n0x04bdad84 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,7}]}, {heap_size,377}, {total_heap_size,754}, {links, [<10870.244.0>,<10870.248.0>,<10870.249.0>, <10870.250.0>,<10870.246.0>,<10870.247.0>, <10870.245.0>,<10870.242.0>,<10870.243.0>, <10870.239.0>]}, {memory,3632}, {message_queue_len,0}, {reductions,551}, {trap_exit,true}]}, {<10870.242.0>, [{registered_name,mnesia_monitor}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x053985f8 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mnesia_monitor\ny(3) {state,<0.241.0>,[],[],true,[],undefined,[]}\ny(4) mnesia_monitor\ny(5) <0.241.0>\n\n0x05398614 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 
44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,35}]}, {heap_size,233}, {total_heap_size,843}, {links,[<10870.253.0>,<10870.241.0>]}, {memory,3828}, {message_queue_len,0}, {reductions,7448}, {trap_exit,true}]}, {<10870.243.0>, [{registered_name,mnesia_subscr}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00eadca0 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mnesia_subscr\ny(3) {state,<0.241.0>,49180}\ny(4) mnesia_subscr\ny(5) <0.241.0>\n\n0x00eadcbc Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.240.0>,<10870.241.0>,<10870.234.0>]}, {memory,1408}, {message_queue_len,0}, {reductions,111}, {trap_exit,true}]}, {<10870.244.0>, [{registered_name,mnesia_locker}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x043cb7e8 (mnesia_locker:loop/1 + 20)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04c96868 Return addr 0x0439f248 (mnesia_sp:init_proc/4 + 132)\ny(0) []\ny(1) []\ny(2) []\ny(3) []\ny(4) []\ny(5) {state,<0.241.0>}\n\n0x04c96884 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) Catch 0x0439f248 (mnesia_sp:init_proc/4 + 132)\ny(1) mnesia_locker\ny(2) []\ny(3) []\ny(4) [<0.241.0>]\n\n0x04c9689c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,749}]}, {heap_size,1597}, {total_heap_size,1974}, {links,[<10870.241.0>]}, {memory,8332}, {message_queue_len,0}, {reductions,324435}, {trap_exit,true}]}, {<10870.245.0>, [{registered_name,mnesia_recover}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0539afa4 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mnesia_recover\ny(3) {state,<0.241.0>,undefined,undefined,undefined,0,true,[]}\ny(4) mnesia_recover\ny(5) <0.241.0>\n\n0x0539afc0 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,7}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<10870.241.0>,<10870.57.0>]}, {memory,12300}, {message_queue_len,0}, {reductions,4308}, {trap_exit,true}]}, {<10870.246.0>, [{registered_name,mnesia_tm}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x043d3c14 (mnesia_tm:doit_loop/1 + 108)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04caffb4 Return addr 0x0439f248 (mnesia_sp:init_proc/4 + 132)\ny(0) []\ny(1) []\ny(2) {state,{0,nil},{0,nil},<0.241.0>,[],[],[]}\ny(3) []\ny(4) []\ny(5) <0.241.0>\ny(6) {0,nil}\ny(7) {0,nil}\n\n0x04caffd8 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) Catch 0x0439f248 (mnesia_sp:init_proc/4 + 132)\ny(1) mnesia_tm\ny(2) []\ny(3) []\ny(4) [<0.241.0>]\n\n0x04cafff0 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,679}]}, {heap_size,610}, {total_heap_size,987}, {links,[<10870.241.0>]}, {memory,4384}, 
{message_queue_len,0}, {reductions,742548}, {trap_exit,true}]}, {<10870.247.0>, [{registered_name,mnesia_checkpoint_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bd9a28 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,mnesia_checkpoint_sup},simple_one_for_one,[{child,undefined,mnesia_checkpoint_sup,{mnesia_checkpoint,start,[]},transient,3000,worker,[mnesia_checkpoint_sup,mnesia_checkpoint,supervisor]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,86400000,[],mnesia_checkpoint_sup,[]}\ny(4) mnesia_checkpoint_sup\ny(5) <0.241.0>\n\n0x04bd9a44 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.241.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,61}, {trap_exit,true}]}, {<10870.248.0>, [{registered_name,mnesia_snmp_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bdcc68 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,mnesia_snmp_sup},simple_one_for_one,[{child,undefined,mnesia_snmp_sup,{mnesia_snmp_hook,start,[]},transient,3000,worker,[mnesia_snmp_sup,mnesia_snmp_hook,supervisor]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},0,86400000,[],mnesia_snmp_sup,[]}\ny(4) mnesia_snmp_sup\ny(5) <0.241.0>\n\n0x04bdcc84 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.241.0>]}, {memory,1368}, {message_queue_len,0}, {reductions,61}, {trap_exit,true}]}, {<10870.249.0>, [{registered_name,mnesia_controller}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04bdc8bc Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) mnesia_controller\ny(3) {state,<0.241.0>,true,[],[],{0,nil},[],[],{0,nil},undefined,[],[],{interval,#Ref<0.0.0.1107>},false}\ny(4) mnesia_controller\ny(5) <0.241.0>\n\n0x04bdc8d8 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,5}]}, {heap_size,610}, {total_heap_size,987}, {links,[<10870.241.0>,<10870.57.0>]}, {memory,4404}, {message_queue_len,0}, {reductions,867}, {trap_exit,true}]}, {<10870.250.0>, [{registered_name,mnesia_late_loader}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x0442f28c (mnesia_late_loader:loop/1 + 20)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x0453fdb4 Return addr 0x0439f248 (mnesia_sp:init_proc/4 + 132)\ny(0) []\ny(1) []\ny(2) {state,<0.241.0>}\n\n0x0453fdc4 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) Catch 0x0439f248 (mnesia_sp:init_proc/4 + 132)\ny(1) mnesia_late_loader\ny(2) []\ny(3) []\ny(4) 
[<0.241.0>]\n\n0x0453fddc Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<10870.241.0>]}, {memory,1944}, {message_queue_len,0}, {reductions,178}, {trap_exit,false}]}, {<10870.253.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x048953e8 (disk_log:loop/1 + 84)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04f010ec Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) {state,[],[],<0.129.0>,<0.130.0>,99,{arg,latest_log,undefined,\"c:/Program Files/Membase/Server/Mnesia.ns_1@10.2.1.102/LATEST.LOG\",true,infinity,halt,false,internal,<0.242.0>,none,read_write,true,[{notify,true},{file,\"c:/Program Files/Membase/Server/Mnesia.ns_1@10.2.1.102/LATEST.LOG\"},{name,latest_log},{repair,true},{mode,read_write}]},ok,ok}\n\n0x04f010f4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,1597}, {total_heap_size,1597}, {links, [<10870.130.0>,<10870.242.0>,<10870.129.0>, #Port<10870.3945>]}, {memory,6884}, {message_queue_len,0}, {reductions,234956}, {trap_exit,true}]}, {<10870.266.0>, [{registered_name,ns_bad_bucket_worker}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00c4fd90 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) work_queue\ny(3) []\ny(4) ns_bad_bucket_worker\ny(5) <0.171.0>\n\n0x00c4fdac Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<10870.171.0>]}, {memory,6824}, {message_queue_len,0}, {reductions,221}, {trap_exit,false}]}, {<10870.267.0>, [{registered_name,ns_bad_bucket_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04b93b98 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) {state,{local,ns_bad_bucket_sup},one_for_one,[{child,<0.307.0>,{stats_reader,\"default\"},{stats_reader,start_link,[\"default\"]},permanent,10,worker,[stats_reader]},{child,<0.269.0>,{stats_archiver,\"default\"},{stats_archiver,start_link,[\"default\"]},permanent,10,worker,[stats_archiver]},{child,<0.268.0>,{stats_collector,\"default\"},{stats_collector,start_link,[\"default\"]},permanent,10,worker,[stats_collector]}],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},3,10,[],ns_bucket_sup,{ns_bad_bucket_sup,#Fun,ns_bad_bucket_worker}}\ny(4) ns_bad_bucket_sup\ny(5) <0.171.0>\n\n0x04b93bb4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,17711}, {total_heap_size,17711}, {links, [<10870.171.0>,<10870.269.0>,<10870.307.0>, <10870.268.0>,<10870.65.0>]}, {memory,71360}, {message_queue_len,0}, {reductions,1595}, {trap_exit,true}]}, {<10870.268.0>, [{registered_name,[]}, {status,waiting}, 
{initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x053a1960 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) stats_collector\ny(3) {state,\"default\",[3278110465,64869396,0,0,0,0,923409,0,0,0,0,0,0,0,0,470865,3011173,162567,0,0,0,0,469811,0,0],11,1294091287748}\ny(4) <0.268.0>\ny(5) <0.267.0>\n\n0x053a197c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,1}]}, {heap_size,6765}, {total_heap_size,17711}, {links,[<10870.267.0>,<10870.227.0>]}, {memory,71300}, {message_queue_len,0}, {reductions,15252247}, {trap_exit,false}]}, {<10870.269.0>, [{registered_name,'stats_archiver-default'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04efc000 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) stats_archiver\ny(3) {state,\"default\"}\ny(4) 'stats_archiver-default'\ny(5) <0.267.0>\n\n0x04efc01c Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,58}]}, {heap_size,4181}, {total_heap_size,21892}, {links,[<10870.228.0>,<10870.267.0>,<10870.57.0>]}, {memory,88044}, {message_queue_len,0}, {reductions,11283941}, {trap_exit,false}]}, {<10870.281.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x00e292d8 (prim_inet:accept0/2 + 92)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00eddf50 Return addr 0x032d8894 (inet_tcp:accept/1 + 20)\ny(0) 127\ny(1) #Port<0.3254>\n\n0x00eddf5c Return addr 0x0435ad2c (mochiweb_socket_server:acceptor_loop/1 + 80)\ny(0) []\n\n0x00eddf64 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) #Fun\ny(1) <0.217.0>\ny(2) Catch 0x0435ad2c (mochiweb_socket_server:acceptor_loop/1 + 80)\n\n0x00eddf74 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<10870.217.0>]}, {memory,1984}, {message_queue_len,0}, {reductions,18}, {trap_exit,false}]}, {<10870.307.0>, [{registered_name,'stats_reader-default'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x060a62e8 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) stats_reader\ny(3) {state,\"default\"}\ny(4) 'stats_reader-default'\ny(5) <0.267.0>\n\n0x060a6304 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,31}]}, {heap_size,514229}, {total_heap_size,1028458}, {links,[<10870.267.0>]}, {memory,4114268}, {message_queue_len,0}, {reductions,2728740}, {trap_exit,false}]}, {<10870.308.0>, [{registered_name,ns_moxi_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x044d6e28 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) supervisor\ny(3) 
{state,{local,ns_moxi_sup},one_for_one,[],{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}},20,10,[],ns_moxi_sup,[]}\ny(4) ns_moxi_sup\ny(5) <0.171.0>\n\n0x044d6e44 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,17711}, {total_heap_size,17711}, {links,[<10870.171.0>,<10870.65.0>]}, {memory,71300}, {message_queue_len,0}, {reductions,2139}, {trap_exit,true}]}, {<10870.309.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03f1ac8c (misc:wait_for_process/2 + 104)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04469160 Return addr 0x03f1da10 (misc:'-start_singleton/4-fun-0-'/2 + 56)\ny(0) []\ny(1) #Ref<0.0.0.1480>\ny(2) infinity\n\n0x04469170 Return addr 0x00b1c194 ()\ny(0) <10833.152.0>\ny(1) ns_tick\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<10870.171.0>]}, {memory,1348}, {message_queue_len,0}, {reductions,6}, {trap_exit,false}]}, {<10870.310.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x03f1de58 (misc:'-wait_for_process/2-fun-0-'/3 + 112)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00ec0434 Return addr 0x00b1c194 ()\ny(0) []\ny(1) <0.309.0>\ny(2) #Ref<0.0.0.1480>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.152.0>]}, {memory,1364}, {message_queue_len,0}, {reductions,13}, {trap_exit,true}]}, {<10870.342.0>, [{registered_name,'ns_memcached-default'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04ee69c8 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_memcached\ny(3) {state,\"default\",#Port<0.3462>}\ny(4) <0.342.0>\ny(5) <0.230.0>\n\n0x04ee69e4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,25}]}, {heap_size,6765}, {total_heap_size,13530}, {links, [#Port<10870.3448>,#Port<10870.3457>, #Port<10870.3462>,<10870.230.0>,<10870.57.0>, #Port<10870.3460>,#Port<10870.3451>, #Port<10870.3453>,#Port<10870.3450>, #Port<10870.3438>,#Port<10870.3445>, #Port<10870.3447>,#Port<10870.3442>, #Port<10870.3433>,#Port<10870.3436>, #Port<10870.3430>]}, {memory,54856}, {message_queue_len,0}, {reductions,67434655}, {trap_exit,true}]}, {<10870.507.0>, [{registered_name,hot_keys_keeper}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x04c661b8 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) hot_keys_keeper\ny(3) 
{state,[{\"default\",[{\"Assdfdf88545244\",[{ops,3.267974e-003}]},{\"Assdfdf1304300285\",[{ops,3.267974e-003}]},{\"Assdfdf623240301\",[{ops,3.267974e-003}]},{\"Assdfdf796192375\",[{ops,3.267974e-003}]},{\"Assdfdf331631225\",[{ops,3.267974e-003}]},{\"Assdfdf88251782\",[{ops,3.267974e-003}]},{\"Assdfdf17166682\",[{ops,3.267974e-003}]},{\"Assdfdf1876246878\",[{ops,3.267974e-003}]},{\"Assdfdf475099662\",[{ops,3.267974e-003}]},{\"Assdfdf1397088614\",[{ops,3.267974e-003}]}]}],[{\"default\",[{\"Assdfdf1709533126\",[{ops,1.631321e-003}]},{\"Assdfdf1404454720\",[{ops,1.631321e-003}]},{\"Assdfdf226835789\",[{ops,1.631321e-003}]},{\"Assdfdf192342381\",[{ops,1.631321e-003}]},{\"Assdfdf106771016\",[{ops,1.631321e-003}]},{\"Assdfdf361091196\",[{ops,1.631321e-003}]},{\"Assdfdf1511934702\",[{ops,1.631321e-003}]},{\"Assdfdf1454320804\",[{ops,1.631321e-003}]},{\"Assdfdf2059888953\",[{ops,1.631321e-003}]},{\"Assdfdf1191035435\",[{ops,1.631321e-003}]},{\"Assdfdf1002576547\",[{ops,1.631321e-003}]},{\"Assdfdf1077235129\",[{ops,1.631321e-003}]},{\"Assdfdf1048236034\",[{ops,1.631321e-003}]},{\"Assdfdf1345529704\",[{ops,1.631321e-003}]},{\"Assdfdf1058703016\",[{ops,1.631321e-003}]},{\"Assdfdf1147637406\",[{ops,1.631321e-003}]},{\"Assdfdf1533714056\",[{ops,1.631321e-003}]},{\"Assdfdf545930075\",[{ops,1.631321e-003}]},{\"Assdfdf6834555\",[{ops,1.631321e-003}]},{\"Assdfdf1177019534\",[{ops,1.631321e-003}]},{\"Assdfdf1107722776\",[{ops,1.631321e-003}]},{\"Assdfdf1296347410\",[{ops,1.631321e-003}]},{\"Assdfdf504131672\",[{ops,1.631321e-003}]},{\"Assdfdf1608486250\",[{ops,1.631321e-003}]},{\"Assdfdf602352902\",[{ops,1.631321e-003}]},{\"Assdfdf1090556094\",[{ops,1.631321e-003}]},{\"Assdfdf1200474718\",[{ops,1.631321e-003}]},{\"Assdfdf799599629\",[{ops,1.631321e-003}]},{\"Assdfdf2118101519\",[{ops,1.631321e-003}]},{\"Assdfdf239571217\",[{ops,1.631321e-003}]},{\"Assdfdf1533144348\",[{ops,1.631321e-003}]},{\"Assdfdf278485659\",[{ops,1.631321e-003}]},{\"Assdfdf1981290821\",[{ops,1.631321e-003}]},{\"Assdfdf1595082700\",[{ops,1.631321e-003}]},{\"Assdfdf1397088614\",[{ops,1.631321e-003}]},{\"Assdfdf475099662\",[{ops,1.631321e-003}]},{\"Assdfdf1196676958\",[{ops,1.631321e-003}]},{\"Assdfdf2117493601\",[{ops,1.631321e-003}]},{\"Assdfdf1876246878\",[{ops,1.631321e-003}]},{\"Assdfdf1426865825\",[{ops,1.631321e-003}]},{\"Assdfdf17166682\",[{ops,1.631321e-003}]},{\"Assdfdf1421467409\",[{ops,1.631321e-003}]},{\"Assdfdf72780157\",[{ops,1.631321e-003}]},{\"Assdfdf88251782\",[{ops,1.631321e-003}]},{\"Assdfdf625275034\",[{ops,1.631321e-003}]},{\"Assdfdf1288059078\",[{ops,1.631321e-003}]},{\"Assdfdf856022456\",[{ops,1.631321e-003}]},{\"Assdfdf2096513354\",[{ops,1.631321e-003}]},{\"Assdfdf331631225\",[{ops,1.631321e-003}]},{\"Assdfdf111730197\",[{ops,1.631321e-003}]},{\"Assdfdf250717573\",[{ops,1.631321e-003}]},{\"Assdfdf962290143\",[{ops,1.631321e-003}]},{\"Assdfdf796192375\",[{ops,1.631321e-003}]},{\"Assdfdf974739469\",[{ops,1.631321e-003}]},{\"Assdfdf312885755\",[{ops,1.631321e-003}]},{\"Assdfdf1463196561\",[{ops,1.631321e-003}]},{\"Assdfdf1529028110\",[{ops,1.631321e-003}]},{\"Assdfdf642641198\",[{ops,1.631321e-003}]},{\"Assdfdf32568220\",[{ops,1.631321e-003}]},{\"Assdfdf701091359\",[{ops,1.631321e-003}]},{\"Assdfdf1994932167\",[{ops,1.631321e-003}]},{\"Assdfdf1324987543\",[{ops,1.631321e-003}]},{\"Assdfdf647270594\",[{ops,1.631321e-003}]},{\"Assdfdf1221818520\",[{ops,1.631321e-003}]},{\"Assdfdf1640362316\",[{ops,1.631321e-003}]},{\"Assdfdf623240301\",[{ops,1.631321e-003}]},{\"Assdfdf820141458\",[{ops,1.631321e-003}]},{\"Assdfdf386610799
\",[{ops,1.631321e-003}]},{\"Assdfdf1461986773\",[{ops,1.631321e-003}]},{\"Assdfdf684003296\",[{ops,1.631321e-003}]},{\"Assdfdf1020775042\",[{ops,1.631321e-003}]},{\"Assdfdf721701622\",[{ops,1.631321e-003}]},{\"Assdfdf2107749255\",[{ops,1.631321e-003}]},{\"Assdfdf1222761010\",[{ops,1.631321e-003}]},{\"Assdfdf1837129101\",[{ops,1.631321e-003}]},{\"Assdfdf814022896\",[{ops,1.631321e-003}]},{\"Assdfdf645506544\",[{ops,1.631321e-003}]},{\"Assdfdf922345339\",[{ops,1.631321e-003}]},{\"Assdfdf1493126454\",[{ops,1.631321e-003}]},{\"Assdfdf573901280\",[{ops,1.631321e-003}]},{\"Assdfdf559325804\",[{ops,1.631321e-003}]},{\"Assdfdf1773979199\",[{ops,1.631321e-003}]},{\"Assdfdf165942096\",[{ops,1.631321e-003}]},{\"Assdfdf2052759757\",[{ops,1.631321e-003}]},{\"Assdfdf239225763\",[{ops,1.631321e-003}]},{\"Assdfdf508335602\",[{ops,1.631321e-003}]},{\"Assdfdf1304300285\",[{ops,1.631321e-003}]},{\"Assdfdf1166283405\",[{ops,1.631321e-003}]},{\"Assdfdf9894592\",[{ops,1.631321e-003}]},{\"Assdfdf648859730\",[{ops,1.631321e-003}]},{\"Assdfdf367576103\",[{ops,1.631321e-003}]},{\"Assdfdf1059803788\",[{ops,1.631321e-003}]},{\"Assdfdf1414423573\",[{ops,1.631321e-003}]},{\"Assdfdf2131855057\",[{ops,1.631321e-003}]},{\"Assdfdf620855889\",[{ops,1.631321e-003}]},{\"Assdfdf1152307615\",[{ops,1.631321e-003}]},{\"Assdfdf1413197931\",[{ops,1.631321e-003}]},{\"Assdfdf1773610943\",[{ops,1.631321e-003}]},{\"Assdfdf188382281\",[{ops,1.631321e-003}]},{\"Assdfdf88545244\",[{ops,1.631321e-003}]}]}],<0.3763.0>}\ny(4) hot_keys_keeper\ny(5) <0.216.0>\n\n0x04c661d4 Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,60}]}, {heap_size,17711}, {total_heap_size,46368}, {links,[<10870.216.0>,<10870.57.0>]}, {memory,185928}, {message_queue_len,0}, {reductions,125302}, {trap_exit,false}]}, {<10870.2984.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, <<"Program counter: 0x03239e3c (gen_server:loop/6 + 144)\nCP: 0x00000000 (invalid)\narity = 0\n\n0x00c685c0 Return addr 0x00e4d830 (proc_lib:init_p_do_apply/3 + 28)\ny(0) []\ny(1) infinity\ny(2) ns_port_server\ny(3) {state,#Port<0.3835>,vbucketmigrator,{[\"Authenticated towards: {Sock 10.2.1.102:11210}\",\"Authenticating towards: {Sock 10.2.1.102:11210}\"],[\"Connecting to {Sock 10.2.1.102:11210}\"]},undefined,[],0}\ny(4) <0.2984.0>\ny(5) <0.231.0>\n\n0x00c685dc Return addr 0x00b1c194 ()\ny(0) Catch 0x00e4d840 (proc_lib:init_p_do_apply/3 + 44)\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,2}]}, {heap_size,987}, {total_heap_size,1974}, {links,[<10870.231.0>,#Port<10870.3835>]}, {memory,8352}, {message_queue_len,0}, {reductions,338}, {trap_exit,true}]}, {<10870.3767.0>, [{registered_name,[]}, {status,running}, {initial_call,{erlang,apply,2}}, {backtrace, <<"Program counter: 0x00d62cd0 (unknown function)\nCP: 0x0512868c (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 56)\n\n0x0555ba70 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) []\ny(1) <0.3767.0>\n\n0x0555ba7c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<584 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,987},{total_heap_size,1974},{links,[<0.231.0>,#Port<0.3835>]},{memory,8352},{message_queue_len,0},{reductions,338},{trap_exit,true}]\ny(1) <0.2984.0>\n\n0x0555ba88 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,hot_keys_keeper},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<5217 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,60}]},{heap_size,17711},{total_heap_size,46368},{links,[<0.216.0>,<0.57.0>]},{memory,185928},{message_queue_len,0},{reductions,125302},{trap_exit,false}]\ny(1) <0.507.0>\n\n0x0555ba94 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,'ns_memcached-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<415 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,25}]},{heap_size,6765},{total_heap_size,13530},{links,[#Port<0.3448>,#Port<0.3457>,#Port<0.3462>,<0.230.0>,<0.57.0>,#Port<0.3460>,#Port<0.3451>,#Port<0.3453>,#Port<0.3450>,#Port<0.3438>,#Port<0.3445>,#Port<0.3447>,#Port<0.3442>,#Port<0.3433>,#Port<0.3436>,#Port<0.3430>]},{memory,54856},{message_queue_len,0},{reductions,67434655},{trap_exit,true}]\ny(1) <0.342.0>\n\n0x0555baa0 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<230 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10833.152.0>]},{memory,1364},{message_queue_len,0},{reductions,13},{trap_exit,true}]\ny(1) <0.310.0>\n\n0x0555baac Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<334 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.171.0>]},{memory,1348},{message_queue_len,0},{reductions,6},{trap_exit,false}]\ny(1) <0.309.0>\n\n0x0555bab8 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_moxi_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<575 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,17711},{total_heap_size,17711},{links,[<0.171.0>,<0.65.0>]},{memory,71300},{message_queue_len,0},{reductions,2139},{trap_exit,true}]\ny(1) <0.308.0>\n\n0x0555bac4 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,'stats_reader-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<414 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,31}]},{heap_size,514229},{total_heap_size,1028458},{links,[<0.267.0>]},{memory,4114268},{message_queue_len,0},{reductions,2728740},{trap_exit,false}]\ny(1) <0.307.0>\n\n0x0555bad0 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<606 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<0.217.0>]},{memory,1984},{message_queue_len,0},{reductions,18},{trap_exit,false}]\ny(1) <0.281.0>\n\n0x0555badc Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,'stats_archiver-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<418 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,58}]},{heap_size,4181},{total_heap_size,21892},{links,[<0.228.0>,<0.267.0>,<0.57.0>]},{memory,88044},{message_queue_len,0},{reductions,11283941},{trap_exit,false}]\ny(1) <0.269.0>\n\n0x0555bae8 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<515 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,6765},{total_heap_size,17711},{links,[<0.267.0>,<0.227.0>]},{memory,71300},{message_queue_len,0},{reductions,15252247},{trap_exit,false}]\ny(1) <0.268.0>\n\n0x0555baf4 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_bad_bucket_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1019 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,17711},{total_heap_size,17711},{links,[<0.171.0>,<0.269.0>,<0.307.0>,<0.268.0>,<0.65.0>]},{memory,71360},{message_queue_len,0},{reductions,1595},{trap_exit,true}]\ny(1) <0.267.0>\n\n0x0555bb00 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_bad_bucket_worker},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<395 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,987},{total_heap_size,1597},{links,[<0.171.0>]},{memory,6824},{message_queue_len,0},{reductions,221},{trap_exit,false}]\ny(1) <0.266.0>\n\n0x0555bb0c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<634 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,1597},{total_heap_size,1597},{links,[<0.130.0>,<0.242.0>,<0.129.0>,#Port<0.3945>]},{memory,6884},{message_queue_len,0},{reductions,234956},{trap_exit,true}]\ny(1) <0.253.0>\n\n0x0555bb18 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_late_loader},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<536 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<0.241.0>]},{memory,1944},{message_queue_len,0},{reductions,178},{trap_exit,false}]\ny(1) <0.250.0>\n\n0x0555bb24 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_controller},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<497 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,5}]},{heap_size,610},{total_heap_size,987},{links,[<0.241.0>,<0.57.0>]},{memory,4404},{message_queue_len,0},{reductions,867},{trap_exit,true}]\ny(1) <0.249.0>\n\n0x0555bb30 Return addr 0x051286ac 
(diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_snmp_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<728 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.241.0>]},{memory,1368},{message_queue_len,0},{reductions,61},{trap_exit,true}]\ny(1) <0.248.0>\n\n0x0555bb3c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_checkpoint_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<760 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.241.0>]},{memory,1368},{message_queue_len,0},{reductions,61},{trap_exit,true}]\ny(1) <0.247.0>\n\n0x0555bb48 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_tm},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<626 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,679}]},{heap_size,610},{total_heap_size,987},{links,[<0.241.0>]},{memory,4384},{message_queue_len,0},{reductions,742548},{trap_exit,true}]\ny(1) <0.246.0>\n\n0x0555bb54 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_recover},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<448 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,7}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.241.0>,<0.57.0>]},{memory,12300},{message_queue_len,0},{reductions,4308},{trap_exit,true}]\ny(1) <0.245.0>\n\n0x0555bb60 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_locker},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<562 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,749}]},{heap_size,1597},{total_heap_size,1974},{links,[<0.241.0>]},{memory,8332},{message_queue_len,0},{reductions,324435},{trap_exit,true}]\ny(1) <0.244.0>\n\n0x0555bb6c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_subscr},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<412 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.240.0>,<0.241.0>,<0.234.0>]},{memory,1408},{message_queue_len,0},{reductions,111},{trap_exit,true}]\ny(1) <0.243.0>\n\n0x0555bb78 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_monitor},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<435 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,35}]},{heap_size,233},{total_heap_size,843},{links,[<0.253.0>,<0.241.0>]},{memory,3828},{message_queue_len,0},{reductions,7448},{trap_exit,true}]\ny(1) <0.242.0>\n\n0x0555bb84 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_kernel_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1660 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,7}]},{heap_size,377},{total_heap_size,754},{links,[<0.244.0>,<0.248.0>,<0.249.0>,<0.250.0>,<0.246.0>,<0.247.0>,<0.245.0>,<0.242.0>,<0.243.0>,<0.239.0>]},{memory,3632},{message_queue_len,0},{reductions,551},{trap_exit,true}]\ny(1) <0.241.0>\n\n0x0555bb90 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_event},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<421 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,987},{total_heap_size,1597},{links,[<0.239.0>,<0.243.0>]},{memory,6844},{message_queue_len,0},{reductions,398},{trap_exit,true}]\ny(1) <0.240.0>\n\n0x0555bb9c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,mnesia_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<807 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,754},{links,[<0.240.0>,<0.241.0>,<0.238.0>]},{memory,3492},{message_queue_len,0},{reductions,198},{trap_exit,true}]\ny(1) <0.239.0>\n\n0x0555bba8 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<244 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.237.0>,<0.239.0>]},{memory,1328},{message_queue_len,0},{reductions,32},{trap_exit,true}]\ny(1) <0.238.0>\n\n0x0555bbb4 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1045 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,987},{total_heap_size,1597},{links,[<0.7.0>,<0.238.0>]},{memory,6844},{message_queue_len,0},{reductions,81},{trap_exit,true}]\ny(1) <0.237.0>\n\n0x0555bbc0 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<230 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<10833.110.0>]},{memory,1364},{message_queue_len,0},{reductions,13},{trap_exit,true}]\ny(1) <0.235.0>\n\n0x0555bbcc Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_mnesia},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<388 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,1597},{total_heap_size,2584},{links,[<0.243.0>,<0.171.0>]},{memory,10792},{message_queue_len,0},{reductions,1477},{trap_exit,true}]\ny(1) <0.234.0>\n\n0x0555bbd8 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<342 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.171.0>]},{memory,1348},{message_queue_len,0},{reductions,6},{trap_exit,false}]\ny(1) <0.233.0>\n\n0x0555bbe4 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 
+ 88)\ny(0) [{registered_name,'ns_vbm_sup-default'},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1941 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,16}]},{heap_size,987},{total_heap_size,7752},{links,[<0.230.0>,<0.2984.0>]},{memory,31464},{message_queue_len,0},{reductions,35169},{trap_exit,true}]\ny(1) <0.231.0>\n\n0x0555bbf0 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_good_bucket_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<914 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,17711},{total_heap_size,17711},{links,[<0.171.0>,<0.231.0>,<0.342.0>,<0.65.0>]},{memory,71340},{message_queue_len,0},{reductions,1557},{trap_exit,true}]\ny(1) <0.230.0>\n\n0x0555bbfc Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_good_bucket_worker},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<396 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,233},{total_heap_size,610},{links,[<0.171.0>]},{memory,2876},{message_queue_len,0},{reductions,167},{trap_exit,false}]\ny(1) <0.229.0>\n\n0x0555bc08 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_stats_event},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<458 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,704}]},{heap_size,1597},{total_heap_size,2584},{links,[<0.171.0>,<0.269.0>]},{memory,10792},{message_queue_len,0},{reductions,64076},{trap_exit,true}]\ny(1) <0.228.0>\n\n0x0555bc14 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_tick_event},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<457 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,117}]},{heap_size,610},{total_heap_size,987},{links,[<0.171.0>,<0.268.0>]},{memory,4404},{message_queue_len,0},{reductions,24441},{trap_exit,true}]\ny(1) <0.227.0>\n\n0x0555bc20 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<555 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,31}]},{heap_size,2584},{total_heap_size,4181},{links,[<0.225.0>,#Port<0.3256>]},{memory,17180},{message_queue_len,0},{reductions,18990},{trap_exit,true}]\ny(1) <0.226.0>\n\n0x0555bc2c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<442 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,987},{total_heap_size,987},{links,[<0.221.0>,<0.226.0>]},{memory,4404},{message_queue_len,0},{reductions,55},{trap_exit,true}]\ny(1) <0.225.0>\n\n0x0555bc38 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<588 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,2584},{total_heap_size,5168},{links,[<0.223.0>,#Port<0.3255>]},{memory,21128},{message_queue_len,0},{reductions,305},{trap_exit,true}]\ny(1) <0.224.0>\n\n0x0555bc44 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<437 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,1597},{total_heap_size,1597},{links,[<0.221.0>,<0.224.0>]},{memory,6844},{message_queue_len,0},{reductions,149},{trap_exit,true}]\ny(1) <0.223.0>\n\n0x0555bc50 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_port_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<2878 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,46368},{total_heap_size,75025},{links,[<0.223.0>,<0.225.0>,<0.171.0>]},{memory,300576},{message_queue_len,0},{reductions,7425},{trap_exit,true}]\ny(1) <0.221.0>\n\n0x0555bc5c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<16473 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,29}]},{heap_size,75025},{total_heap_size,150050},{links,[<0.217.0>,#Port<0.3380>]},{memory,600736},{message_queue_len,0},{reductions,9767093},{trap_exit,false}]\ny(1) <0.218.0>\n\n0x0555bc68 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,menelaus_web},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<522 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,377},{total_heap_size,987},{links,[<0.216.0>,<0.218.0>,<0.281.0>,#Port<0.3254>]},{memory,4444},{message_queue_len,0},{reductions,335},{trap_exit,true}]\ny(1) <0.217.0>\n\n0x0555bc74 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,menelaus_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<916 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,4181},{total_heap_size,32838},{links,[<0.217.0>,<0.507.0>,<0.171.0>]},{memory,131828},{message_queue_len,0},{reductions,4564},{trap_exit,true}]\ny(1) <0.216.0>\n\n0x0555bc80 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_doctor},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<3450 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,654}]},{heap_size,6765},{total_heap_size,9349},{links,[<0.171.0>,<0.57.0>]},{memory,37852},{message_queue_len,0},{reductions,279031},{trap_exit,false}]\ny(1) <0.200.0>\n\n0x0555bc8c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_heart},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<748 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,314}]},{heap_size,6765},{total_heap_size,53133},{links,[<0.171.0>,<0.57.0>]},{memory,212988},{message_queue_len,0},{reductions,3957929},{trap_exit,false}]\ny(1) <0.199.0>\n\n0x0555bc98 Return addr 0x051286ac 
(diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_config_rep},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<396 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,26}]},{heap_size,46368},{total_heap_size,75025},{links,[<0.177.0>]},{memory,300536},{message_queue_len,0},{reductions,42693},{trap_exit,false}]\ny(1) <0.191.0>\n\n0x0555bca4 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{inet_tcp_dist,do_accept,6}},{backtrace,<<452 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,15}]},{heap_size,233},{total_heap_size,610},{links,[<0.21.0>,#Port<0.3249>]},{memory,2836},{message_queue_len,0},{reductions,3720},{trap_exit,false}]\ny(1) <0.186.0>\n\n0x0555bcb0 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{inet_tcp_dist,do_setup,6}},{backtrace,<<452 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,16}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.21.0>,#Port<0.3247>]},{memory,12240},{message_queue_len,0},{reductions,4087},{trap_exit,false}]\ny(1) <0.182.0>\n\n0x0555bcbc Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_node_disco},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<479 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,184}]},{heap_size,46368},{total_heap_size,53133},{links,[<0.177.0>,<0.57.0>]},{memory,212988},{message_queue_len,0},{reductions,243491},{trap_exit,false}]\ny(1) <0.179.0>\n\n0x0555bcc8 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_node_disco_events},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<413 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,66},{total_heap_size,66},{links,[<0.177.0>]},{memory,740},{message_queue_len,0},{reductions,111},{trap_exit,true}]\ny(1) <0.178.0>\n\n0x0555bcd4 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_node_disco_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1094 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,4}]},{heap_size,233},{total_heap_size,610},{links,[<0.179.0>,<0.191.0>,<0.178.0>,<0.171.0>]},{memory,2936},{message_queue_len,0},{reductions,805},{trap_exit,true}]\ny(1) <0.177.0>\n\n0x0555bce0 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_mail},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<388 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.174.0>]},{memory,1368},{message_queue_len,0},{reductions,27},{trap_exit,true}]\ny(1) <0.175.0>\n\n0x0555bcec Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_mail_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<746 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,233},{total_heap_size,610},{links,[<0.175.0>,<0.171.0>]},{memory,2896},{message_queue_len,0},{reductions,664},{trap_exit,true}]\ny(1) <0.174.0>\n\n0x0555bcf8 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_log_events},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<223 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,8}]},{heap_size,34},{total_heap_size,34},{links,[<0.171.0>]},{memory,572},{message_queue_len,0},{reductions,5131},{trap_exit,true}]\ny(1) <0.173.0>\n\n0x0555bd04 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_log},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<5914 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,5}]},{heap_size,6765},{total_heap_size,13530},{links,[<0.57.0>,<0.171.0>]},{memory,54576},{message_queue_len,0},{reductions,6688},{trap_exit,false}]\ny(1) <0.172.0>\n\n0x0555bd10 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_server_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<2546 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,46368},{total_heap_size,121393},{links,[<0.216.0>,<0.229.0>,<0.266.0>,<0.308.0>,<0.309.0>,<0.267.0>,<0.233.0>,<0.234.0>,<0.230.0>,<0.227.0>,<0.228.0>,<0.221.0>,<0.174.0>,<0.199.0>,<0.200.0>,<0.177.0>,<0.172.0>,<0.173.0>,<0.61.0>]},{memory,486368},{message_queue_len,0},{reductions,55933},{trap_exit,true}]\ny(1) <0.171.0>\n\n0x0555bd1c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,dets},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<404 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,10}]},{heap_size,610},{total_heap_size,1597},{links,[<0.34.0>]},{memory,6824},{message_queue_len,0},{reductions,1171},{trap_exit,true}]\ny(1) <0.134.0>\n\n0x0555bd28 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,dets_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<647 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,7}]},{heap_size,377},{total_heap_size,987},{links,[<0.34.0>]},{memory,4384},{message_queue_len,0},{reductions,720},{trap_exit,true}]\ny(1) <0.133.0>\n\n0x0555bd34 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,disk_log_server},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<402 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,25}]},{heap_size,1597},{total_heap_size,2584},{links,[<0.253.0>,<0.34.0>]},{memory,10792},{message_queue_len,0},{reductions,6194},{trap_exit,true}]\ny(1) <0.130.0>\n\n0x0555bd40 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,disk_log_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<691 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,21}]},{heap_size,377},{total_heap_size,987},{links,[<0.253.0>,<0.34.0>]},{memory,4404},{message_queue_len,0},{reductions,5272},{trap_exit,true}]\ny(1) <0.129.0>\n\n0x0555bd4c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_config},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<44579 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,15}]},{heap_size,75025},{total_heap_size,121393},{links,[<0.64.0>]},{memory,486008},{message_queue_len,0},{reductions,355416},{trap_exit,false}]\ny(1) <0.66.0>\n\n0x0555bd58 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_config_events},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1347 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,835},{total_heap_size,835},{links,[<0.230.0>,<0.267.0>,<0.308.0>,<0.64.0>]},{memory,3876},{message_queue_len,0},{reductions,430155},{trap_exit,true}]\ny(1) <0.65.0>\n\n0x0555bd64 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_config_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1004 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,6765},{total_heap_size,7142},{links,[<0.65.0>,<0.66.0>,<0.61.0>]},{memory,29044},{message_queue_len,0},{reductions,1037},{trap_exit,true}]\ny(1) <0.64.0>\n\n0x0555bd70 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_cluster},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<389 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,10}]},{heap_size,10946},{total_heap_size,15127},{links,[<0.61.0>]},{memory,60944},{message_queue_len,0},{reductions,13559},{trap_exit,false}]\ny(1) <0.63.0>\n\n0x0555bd7c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,dist_manager},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<411 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.61.0>]},{memory,1368},{message_queue_len,0},{reductions,132},{trap_exit,false}]\ny(1) <0.62.0>\n\n0x0555bd88 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,ns_server_cluster_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1185 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,377},{total_heap_size,987},{links,[<0.62.0>,<0.64.0>,<0.171.0>,<0.63.0>,<0.60.0>]},{memory,4464},{message_queue_len,0},{reductions,2794},{trap_exit,true}]\ny(1) <0.61.0>\n\n0x0555bd94 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<232 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.59.0>,<0.61.0>]},{memory,1328},{message_queue_len,0},{reductions,50},{trap_exit,true}]\ny(1) <0.60.0>\n\n0x0555bda0 Return addr 0x051286ac 
(diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<739 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,377},{total_heap_size,377},{links,[<0.7.0>,<0.60.0>]},{memory,1964},{message_queue_len,0},{reductions,46},{trap_exit,true}]\ny(1) <0.59.0>\n\n0x0555bdac Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,timer_server},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<376 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,586}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.199.0>,<0.249.0>,<0.342.0>,<0.507.0>,<0.269.0>,<0.200.0>,<0.245.0>,<0.172.0>,<0.179.0>,<0.34.0>]},{memory,12460},{message_queue_len,0},{reductions,145568},{trap_exit,true}]\ny(1) <0.57.0>\n\n0x0555bdb8 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,memsup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<525 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,87}]},{heap_size,377},{total_heap_size,987},{links,[<0.52.0>]},{memory,4384},{message_queue_len,0},{reductions,88068},{trap_exit,true}]\ny(1) <0.55.0>\n\n0x0555bdc4 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,disksup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<473 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,103}]},{heap_size,233},{total_heap_size,1830},{links,[<0.52.0>]},{memory,7756},{message_queue_len,0},{reductions,39319},{trap_exit,true}]\ny(1) <0.54.0>\n\n0x0555bdd0 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,os_mon_sysinfo},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<411 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,53}]},{heap_size,1597},{total_heap_size,1974},{links,[<0.52.0>,#Port<0.1438>]},{memory,8352},{message_queue_len,0},{reductions,4846},{trap_exit,true}]\ny(1) <0.53.0>\n\n0x0555bddc Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,os_mon_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<828 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,754},{links,[<0.53.0>,<0.54.0>,<0.55.0>,<0.51.0>]},{memory,3512},{message_queue_len,0},{reductions,274},{trap_exit,true}]\ny(1) <0.52.0>\n\n0x0555bde8 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<229 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.50.0>,<0.52.0>]},{memory,1328},{message_queue_len,0},{reductions,40},{trap_exit,true}]\ny(1) <0.51.0>\n\n0x0555bdf4 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<568 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.7.0>,<0.51.0>]},{memory,1388},{message_queue_len,0},{reductions,23},{trap_exit,true}]\ny(1) <0.50.0>\n\n0x0555be00 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<351 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.32.0>]},{memory,1348},{message_queue_len,0},{reductions,14},{trap_exit,false}]\ny(1) <0.48.0>\n\n0x0555be0c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<223 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.32.0>]},{memory,1308},{message_queue_len,0},{reductions,8},{trap_exit,false}]\ny(1) <0.47.0>\n\n0x0555be18 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,release_handler},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<645 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,5}]},{heap_size,610},{total_heap_size,987},{links,[<0.41.0>]},{memory,4384},{message_queue_len,0},{reductions,1249},{trap_exit,false}]\ny(1) <0.45.0>\n\n0x0555be24 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,overload},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<433 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.42.0>]},{memory,1368},{message_queue_len,0},{reductions,39},{trap_exit,false}]\ny(1) <0.44.0>\n\n0x0555be30 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,alarm_handler},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<405 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.42.0>]},{memory,1368},{message_queue_len,0},{reductions,28},{trap_exit,true}]\ny(1) <0.43.0>\n\n0x0555be3c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,sasl_safe_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<748 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,233},{total_heap_size,610},{links,[<0.43.0>,<0.44.0>,<0.41.0>]},{memory,2916},{message_queue_len,0},{reductions,174},{trap_exit,true}]\ny(1) <0.42.0>\n\n0x0555be48 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,sasl_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<774 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,233},{total_heap_size,610},{links,[<0.42.0>,<0.45.0>,<0.40.0>]},{memory,2916},{message_queue_len,0},{reductions,158},{trap_exit,true}]\ny(1) <0.41.0>\n\n0x0555be54 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<246 
bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.39.0>,<0.41.0>]},{memory,1328},{message_queue_len,0},{reductions,70},{trap_exit,true}]\ny(1) <0.40.0>\n\n0x0555be60 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<700 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.7.0>,<0.40.0>]},{memory,1388},{message_queue_len,0},{reductions,23},{trap_exit,true}]\ny(1) <0.39.0>\n\n0x0555be6c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,kernel_safe_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1044 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,610},{total_heap_size,987},{links,[<0.130.0>,<0.133.0>,<0.134.0>,<0.57.0>,<0.129.0>,<0.11.0>]},{memory,4484},{message_queue_len,0},{reductions,388},{trap_exit,true}]\ny(1) <0.34.0>\n\n0x0555be78 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<385 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.11.0>]},{memory,1368},{message_queue_len,0},{reductions,268},{trap_exit,true}]\ny(1) <0.33.0>\n\n0x0555be84 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<404 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,4}]},{heap_size,2584},{total_heap_size,20295},{links,[<0.47.0>,<0.48.0>,<0.31.0>]},{memory,81656},{message_queue_len,0},{reductions,5170},{trap_exit,true}]\ny(1) <0.32.0>\n\n0x0555be90 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,user},{status,waiting},{initial_call,{user,server,2}},{backtrace,<<728 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,35}]},{heap_size,1597},{total_heap_size,5778},{links,[<0.29.0>,<0.32.0>,#Port<0.830>,<0.6.0>]},{memory,23648},{message_queue_len,0},{reductions,40320},{trap_exit,true}]\ny(1) <0.31.0>\n\n0x0555be9c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<441 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,1597},{total_heap_size,1597},{links,[<0.11.0>,<0.31.0>]},{memory,6844},{message_queue_len,0},{reductions,166},{trap_exit,true}]\ny(1) <0.29.0>\n\n0x0555bea8 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,standard_error},{status,waiting},{initial_call,{standard_error,server,2}},{backtrace,<<187 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.27.0>,#Port<0.792>]},{memory,1388},{message_queue_len,0},{reductions,7},{trap_exit,true}]\ny(1) <0.28.0>\n\n0x0555beb4 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) 
[{registered_name,standard_error_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<464 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.11.0>,<0.28.0>]},{memory,1388},{message_queue_len,0},{reductions,40},{trap_exit,true}]\ny(1) <0.27.0>\n\n0x0555bec0 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,code_server},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<2875 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,221}]},{heap_size,4181},{total_heap_size,21892},{links,[<0.11.0>]},{memory,87944},{message_queue_len,0},{reductions,193766},{trap_exit,true}]\ny(1) <0.26.0>\n\n0x0555becc Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,file_server_2},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<398 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1396}]},{heap_size,6765},{total_heap_size,13530},{links,[#Port<0.496>,<0.11.0>]},{memory,54576},{message_queue_len,0},{reductions,1945575},{trap_exit,true}]\ny(1) <0.25.0>\n\n0x0555bed8 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,global_group},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<456 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.11.0>]},{memory,1368},{message_queue_len,0},{reductions,76},{trap_exit,true}]\ny(1) <0.24.0>\n\n0x0555bee4 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{net_kernel,ticker,2}},{backtrace,<<194 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.21.0>]},{memory,1308},{message_queue_len,0},{reductions,297},{trap_exit,false}]\ny(1) <0.23.0>\n\n0x0555bef0 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{inet_tcp_dist,accept_loop,2}},{backtrace,<<385 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,4}]},{heap_size,377},{total_heap_size,754},{links,[<0.21.0>]},{memory,3432},{message_queue_len,0},{reductions,789},{trap_exit,false}]\ny(1) <0.22.0>\n\n0x0555befc Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,net_kernel},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<651 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,10}]},{heap_size,2584},{total_heap_size,2961},{links,[<0.23.0>,<0.182.0>,<0.186.0>,<0.18.0>,<0.22.0>,#Port<0.460>]},{memory,12396},{message_queue_len,0},{reductions,3973},{trap_exit,true}]\ny(1) <0.21.0>\n\n0x0555bf08 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,auth},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<397 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,610},{total_heap_size,987},{links,[<0.18.0>]},{memory,4384},{message_queue_len,0},{reductions,397},{trap_exit,true}]\ny(1) <0.20.0>\n\n0x0555bf14 Return addr 
0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,erl_epmd},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<409 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.18.0>,#Port<0.473>]},{memory,1388},{message_queue_len,0},{reductions,135},{trap_exit,false}]\ny(1) <0.19.0>\n\n0x0555bf20 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,net_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<870 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,987},{links,[<0.19.0>,<0.20.0>,<0.21.0>,<0.11.0>]},{memory,4444},{message_queue_len,0},{reductions,265},{trap_exit,true}]\ny(1) <0.18.0>\n\n0x0555bf2c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,inet_db},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<498 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,17}]},{heap_size,377},{total_heap_size,754},{links,[<0.11.0>]},{memory,3452},{message_queue_len,0},{reductions,1754},{trap_exit,true}]\ny(1) <0.17.0>\n\n0x0555bf38 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<176 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,610},{total_heap_size,987},{links,[<0.13.0>]},{memory,4324},{message_queue_len,0},{reductions,249},{trap_exit,false}]\ny(1) <0.16.0>\n\n0x0555bf44 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<297 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,377},{total_heap_size,754},{links,[<0.13.0>]},{memory,3392},{message_queue_len,0},{reductions,232},{trap_exit,false}]\ny(1) <0.15.0>\n\n0x0555bf50 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<339 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,3}]},{heap_size,610},{total_heap_size,987},{links,[<0.13.0>]},{memory,4324},{message_queue_len,0},{reductions,393},{trap_exit,true}]\ny(1) <0.14.0>\n\n0x0555bf5c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,global_name_server},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<538 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,8}]},{heap_size,1597},{total_heap_size,1974},{links,[<0.14.0>,<0.16.0>,<0.15.0>,<0.11.0>]},{memory,8504},{message_queue_len,0},{reductions,2085},{trap_exit,true}]\ny(1) <0.13.0>\n\n0x0555bf68 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,rex},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<453 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,187}]},{heap_size,987},{total_heap_size,1364},{links,[<0.11.0>]},{memory,5996},{message_queue_len,0},{reductions,55620},{trap_exit,true}]\ny(1) <0.12.0>\n\n0x0555bf74 Return 
addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,kernel_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1623 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,4181},{total_heap_size,8362},{links,[<0.26.0>,<0.29.0>,<0.33.0>,<0.34.0>,<0.27.0>,<0.17.0>,<0.24.0>,<0.25.0>,<0.18.0>,<0.12.0>,<0.13.0>,<0.10.0>]},{memory,34104},{message_queue_len,0},{reductions,3131},{trap_exit,true}]\ny(1) <0.11.0>\n\n0x0555bf80 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,4}},{backtrace,<<228 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,0}]},{heap_size,233},{total_heap_size,233},{links,[<0.9.0>,<0.11.0>]},{memory,1328},{message_queue_len,0},{reductions,72},{trap_exit,true}]\ny(1) <0.10.0>\n\n0x0555bf8c Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<1414 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,1}]},{heap_size,377},{total_heap_size,754},{links,[<0.7.0>,<0.10.0>]},{memory,3472},{message_queue_len,0},{reductions,44},{trap_exit,true}]\ny(1) <0.9.0>\n\n0x0555bf98 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,application_controller},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<530 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,25}]},{heap_size,2584},{total_heap_size,20295},{links,[<0.50.0>,<0.59.0>,<0.237.0>,<0.9.0>,<0.39.0>,<0.0.0>]},{memory,81716},{message_queue_len,0},{reductions,31489},{trap_exit,true}]\ny(1) <0.7.0>\n\n0x0555bfa4 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,error_logger},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtrace,<<552 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,2}]},{heap_size,4181},{total_heap_size,8362},{links,[<0.0.0>,<0.31.0>,#Port<0.1578>]},{memory,33924},{message_queue_len,0},{reductions,130765},{trap_exit,true}]\ny(1) <0.6.0>\n\n0x0555bfb0 Return addr 0x051286ac (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 88)\ny(0) [{registered_name,erl_prim_loader},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,<<620 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,362}]},{heap_size,1597},{total_heap_size,12543},{links,[#Port<0.1>,<0.0.0>]},{memory,50568},{message_queue_len,0},{reductions,1020771},{trap_exit,true}]\ny(1) <0.3.0>\n\n0x0555bfbc Return addr 0x05127894 (diag_handler:do_diag_per_node/0 + 112)\ny(0) [{registered_name,init},{status,waiting},{initial_call,{otp_ring0,start,2}},{backtrace,<<830 bytes>>},{error_handler,error_handler},{garbage_collection,[{fullsweep_after,65535},{minor_gcs,91}]},{heap_size,1597},{total_heap_size,3194},{links,[<0.6.0>,<0.7.0>,<0.3.0>]},{memory,13192},{message_queue_len,0},{reductions,27650},{trap_exit,true}]\ny(1) <0.0.0>\n\n0x0555bfc8 Return addr 0x0328a77c (rpc:'-handle_call_call/6-fun-0-'/5 + 104)\ny(0) []\ny(1) []\ny(2) 
[{version,[{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]},{system_arch,\"windows\"},{wall_clock,2218},{memory_data,{4284698624,3351322624,{<0.307.0>,4114268}}},{disk_data,[{\"C:\\\",49423972,41},{\"D:\\\",52797620,0},{\"G:\\\",34724465,17}]}]\ny(3) [{{node,'ns_1@10.2.1.101',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.102',memcached},[{port,11210},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{otp,[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{cookie,pmqchiglstnppkwf}]},{memory_quota,3268},{{node,'ns_1@10.2.1.102',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]},{{node,'ns_1@10.2.1.100',membership},active},{rebalance_status,{none,<<76 bytes>>}},{{node,'ns_1@10.2.1.101',membership},active},{rest_creds,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{creds,[{\"Administrator\",[{password,'filtered-out'}]}]}]},{buckets,[{'_vclock',[{'ns_1@10.2.1.100',{9,63461309965}}]},{configs,[{\"default\",[{num_replicas,1},{ram_quota,3426746368},{auth_type,sasl},{sasl_password,[]},{type,membase},{num_vbuckets,1024},{ht_size,3079},{tap_keepalive,0},{tap_noop_interval,20},{max_txn_size,1000},{ht_locks,5},{servers,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{map,[['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@1
0.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.101','ns_
1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1
@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10
.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.
2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.101','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1
.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.
102','ns_1@10.2.1.100'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.101'],['ns_1@10.2.1.102','ns_1@10.2.1.100'],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.102',undefined],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],[
'ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns
_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_
1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@1
0.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101'],['ns_1@10.2.1.100','ns_1@10.2.1.101']]}]}]}]},{port_servers,[{moxi,\"./bin/moxi/moxi\",[\"-Z\",{\"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200\",[port]},\"-z\",{\"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming\",[{rest,port}]},\"-p\",\"0\",\"-Y\",\"y\",\"-O\",\"stderr\",{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MOXI_SASL_PLAIN_USR\",{\"~s\",[{ns_moxi_sup,rest_user,[]}]}},{\"MOXI_SASL_PLAIN_PWD\",{\"~s\",[{ns_moxi_sup,rest_pass,[]}]}}]},use_stdio,stderr_to_stdout,stream]},{memcached,\"./bin/memcached/memcached\",[\"-X\",\"./bin/memcached/stdin_term_handler.so\",\"-p\",{\"~B\",[port]},\"-E\",\"./bin/bucket_engine/bucket_engine.so\",\"-B\",\"binary\",\"-r\",\"-c\",\"10000\",\"-e\",{\"admin=~s;default_bucket_name=default;auto_create=false\",[admin_user]},{\"~s\",[verbosity]}],[{env,[{\"EVENT_NOSELECT\",\"1\"},{\"MEMCACHED_TOP_KEYS\",\"100\"},{\"ISASL_PWFILE\",{\"~s\",[{isasl,path}]}},{\"ISASL_DB_CHECK_TIME\",\"1\"}]},use_stdio,stderr_to_stdout,stream]}]},{alerts,[{email,[]},{email_alerts,false},{email_server,[{user,undefined},{pass,'filtered-out'},{addr,undefined},{port,undefined},{encrypt,false}]},{alerts,[server_down,server_unresponsive,server_up,server_joined,server_left,bucket_created,bucket_deleted,bucket_auth_failed]}]},{nodes_wanted,[{'_vclock',[{'ns_1@10.2.1.100',{2,63461308289}}]},'ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},{rest,[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307269}}]},{port,8091}]},{{node,'ns_1@10.2.1.102',membership},active},{{node,'ns_1@10.2.1.100',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.101',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{{node,'ns_1@10.2.1.102',isasl},[{path,\"c:/Program Files/Membase/Server/data/ns_1/isasl.pw\"}]},{moxi,[{port,11211},{verbosity,[]}]},{replication,[{enabled,true}]},{{node,'ns_1@10.2.1.100',memcached},[{'_vclock',[{'ns_1@10.2.1.100',{1,63461307259}}]},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.101',memcached},[{'_vclock',[{'ns_1@10.2.1.101',{1,63461307307}}]},{dbdir,\"c:/Program Files/Membase/Server/data/ns_1\"},{port,11210},{admin_user,\"_admin\"},{admin_pass,\"_admin\"},{bucket_engine,\"./bin/bucket_engine/bucket_engine.so\"},{engines,[{membase,[{engine,\"bin/ep_engine/ep.so\"},{initfile,\"priv/init.sql\"}]},{memcached,[{engine,\"bin/memcached/default_engine.so\"}]}]},{verbosity,[]}]},{{node,'ns_1@10.2.1.100',ns_log},[{filename,\"c:/Program Files/Membase/Server/data/ns_1/ns_log\"}]}]\ny(4) 
[\"bucket_engine_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r\",\"curl-7.21.1-w64_patched.tar.gz\r\",\"ep-engine_1.6.5r_4_g9d25ede-MINGW32_NT-6.0.i686.tar.gz\r\",\"libconflate_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"libevent-2.0.7-rc.tar.gz\r\",\"libmemcached-0.41_trond-norbye_mingw32-revno895.tar.gz\r\",\"libvbucket_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"membase-cli_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"memcached_1.4.4_359_g06c7d3b-MINGW32_NT-6.0.i686.tar.gz\r\",\"moxi_1.6.4-MINGW32_NT-6.0.i686.tar.gz\r\",\"ns_server_1.6.5r.tar.gz\r\",\"pthreads-w64-2-8-0-release.tar.gz\r\",\"vbucketmigrator_1.6.5r-MINGW32_NT-6.0.i686.tar.gz\r\",\"wallace_1.6.5r-2-gc6cf01c-win64-201012280140\r\"]\ny(5) [{os_mon,\"2.2.4\"},{mnesia,\"4.4.12\"},{kernel,\"2.13.4\"},{sasl,\"2.1.8\"},{ns_server,\"1.6.5r\"},{menelaus,\"1.6.5r\"},{stdlib,\"1.16.4\"}]\n\n0x0555bfe4 Return addr 0x00b1c194 ()\ny(0) Catch 0x0328a77c (rpc:'-handle_call_call/6-fun-0-'/5 + 104)\ny(1) []\ny(2) []\ny(3) []\ny(4) <0.12.0>\n">>}, {error_handler,error_handler}, {garbage_collection, [{fullsweep_after,65535},{minor_gcs,0}]}, {heap_size,28657}, {total_heap_size,28657}, {links,[]}, {memory,115024}, {message_queue_len,0}, {reductions,18419}, {trap_exit,false}]}]}, {memory,{4284698624,3351322624,{<10870.307.0>,4114268}}}, {disk, [{"C:\\",49423972,41}, {"D:\\",52797620,0}, {"G:\\",34724465,17}]}]}] nodes_info = [{struct,[{uptime,<<"3302">>}, {memoryTotal,4284698624}, {memoryFree,91607040}, {mcdMemoryReserved,3268}, {mcdMemoryAllocated,3268}, {otpNode,<<"ns_1@10.2.1.100">>}, {otpCookie,<<"pmqchiglstnppkwf">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {hostname,<<"10.2.1.100:8091">>}, {clusterCompatibility,1}, {version,<<"1.6.5r">>}, {os,<<"windows">>}, {ports,{struct,[{proxy,11211},{direct,11210}]}}]}, {struct,[{uptime,<<"3232">>}, {memoryTotal,4284698624}, {memoryFree,74428416}, {mcdMemoryReserved,3268}, {mcdMemoryAllocated,3268}, {otpNode,<<"ns_1@10.2.1.101">>}, {otpCookie,<<"pmqchiglstnppkwf">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {hostname,<<"10.2.1.101:8091">>}, {clusterCompatibility,1}, {version,<<"1.6.5r">>}, {os,<<"windows">>}, {ports,{struct,[{proxy,11211},{direct,11210}]}}]}, {struct,[{uptime,<<"2220">>}, {memoryTotal,4284698624}, {memoryFree,933376000}, {mcdMemoryReserved,3268}, {mcdMemoryAllocated,3268}, {otpNode,<<"ns_1@10.2.1.102">>}, {otpCookie,<<"pmqchiglstnppkwf">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {hostname,<<"10.2.1.102:8091">>}, {clusterCompatibility,1}, {version,<<"1.6.5r">>}, {os,<<"windows">>}, {ports,{struct,[{proxy,11211},{direct,11210}]}}]}] buckets = [{"default", [{num_replicas,1}, {ram_quota,3426746368}, {auth_type,sasl}, {sasl_password,[]}, {type,membase}, {num_vbuckets,1024}, {ht_size,3079}, {tap_keepalive,0}, {tap_noop_interval,20}, {max_txn_size,1000}, {ht_locks,5}, {servers,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']}, {map,[['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], 
['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], 
['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], 
['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], 
['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], 
['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101'], ['ns_1@10.2.1.100','ns_1@10.2.1.101']]}]}] logs: ------------------------------- 2011-01-03 12:53:08.384 ns_node_disco:3:info:cookie update - Initial otp cookie generated: pmqchiglstnppkwf 2011-01-03 12:53:08.430 menelaus_app:1:info:web start ok - Membase Server has started on web port 8091 on node 'ns_1@10.2.1.100'. 2011-01-03 12:54:23.489 menelaus_web:12:info:message - Created bucket "default" of type: membase 2011-01-03 12:55:08.136 ns_node_disco:4:info:node up - Node 'ns_1@10.2.1.100' saw that node 'ns_1@10.2.1.101' came up. 2011-01-03 12:55:08.383 menelaus_app:1:info:web start ok - Membase Server has started on web port 8091 on node 'ns_1@10.2.1.101'. 
2011-01-03 12:55:08.695 ns_cluster:3:info:message - Node ns_1@10.2.1.101 joined cluster
2011-01-03 12:55:15.374 ns_orchestrator:4:info:message - Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101'], EjectNodes = []
2011-01-03 12:55:21.458 ns_orchestrator:2:info:message - Rebalance exited with reason wait_for_memcached_failed
2011-01-03 12:55:51.083 ns_memcached:1:info:message - Bucket "default" loaded on node 'ns_1@10.2.1.100' in 1 seconds.
2011-01-03 12:56:38.567 ns_memcached:1:info:message - Bucket "default" loaded on node 'ns_1@10.2.1.101' in 1 seconds.
2011-01-03 12:56:46.713 ns_orchestrator:1:info:message - Rebalance completed successfully.
2011-01-03 13:01:08.373 ns_orchestrator:4:info:message - Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101'], EjectNodes = [] (repeated 1 times)
2011-01-03 13:11:29.429 menelaus_app:1:info:web start ok - Membase Server has started on web port 8091 on node 'ns_1@10.2.1.102'.
2011-01-03 13:11:29.585 ns_node_disco:4:info:node up - Node 'ns_1@10.2.1.102' saw that node 'ns_1@10.2.1.101' came up.
2011-01-03 13:11:29.773 ns_cluster:3:info:message - Node ns_1@10.2.1.102 joined cluster
2011-01-03 13:11:29.788 ns_node_disco:4:info:node up - Node 'ns_1@10.2.1.100' saw that node 'ns_1@10.2.1.102' came up.
2011-01-03 13:11:30.304 ns_node_disco:4:info:node up - Node 'ns_1@10.2.1.101' saw that node 'ns_1@10.2.1.102' came up.
2011-01-03 13:11:32.596 ns_orchestrator:4:info:message - Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'], EjectNodes = []
2011-01-03 13:11:38.696 ns_orchestrator:2:info:message - Rebalance exited with reason wait_for_memcached_failed
2011-01-03 13:12:56.806 ns_memcached:1:info:message - Bucket "default" loaded on node 'ns_1@10.2.1.102' in 1 seconds.
2011-01-03 13:17:08.372 ns_orchestrator:2:info:message - Rebalance exited with reason wait_for_memcached_failed (repeated 2 times)
2011-01-03 13:17:08.372 ns_orchestrator:4:info:message - Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'], EjectNodes = [] (repeated 3 times)
2011-01-03 13:24:02.961 ns_orchestrator:2:info:message - Rebalance exited with reason stopped
2011-01-03 13:24:21.634 ns_orchestrator:4:info:message - Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'], EjectNodes = []
2011-01-03 13:39:18.702 ns_orchestrator:2:info:message - Rebalance exited with reason stopped
2011-01-03 13:39:25.067 ns_orchestrator:4:info:message - Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'], EjectNodes = []
2011-01-03 13:39:38.796 ns_memcached:4:info:message - Control connection to memcached on 'ns_1@10.2.1.101' disconnected: {{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}
2011-01-03 13:39:38.858 ns_orchestrator:2:info:message - Rebalance exited with reason {{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,633}, 30000]}}
2011-01-03 13:39:41.885 ns_memcached:1:info:message - Bucket "default" loaded on node 'ns_1@10.2.1.101' in 0 seconds.
2011-01-03 13:40:13.053 ns_orchestrator:2:info:message - Rebalance exited with reason {{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,65}, 30000]}}
2011-01-03 13:40:35.018 ns_orchestrator:2:info:message - Rebalance exited with reason {{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,70}, 30000]}}
2011-01-03 13:45:07.927 ns_memcached:1:info:message - Bucket "default" loaded on node 'ns_1@10.2.1.101' in 0 seconds. (repeated 9 times)
2011-01-03 13:45:07.927 ns_memcached:4:info:message - Control connection to memcached on 'ns_1@10.2.1.101' disconnected: {{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} (repeated 9 times)
2011-01-03 13:45:08.378 ns_orchestrator:4:info:message - Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'], EjectNodes = [] (repeated 2 times)
2011-01-03 13:45:20.672 ns_memcached:4:info:message - Control connection to memcached on 'ns_1@10.2.1.101' disconnected: {{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}
2011-01-03 13:45:20.813 ns_memcached:1:info:message - Bucket "default" loaded on node 'ns_1@10.2.1.101' in 0 seconds.
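Every rebalance failure in the log above has the same shape: ns_orchestrator makes a gen_server call, with a 30000 ms timeout, to the per-bucket 'ns_memcached-default' process on 'ns_1@10.2.1.101', asking it to delete one vbucket; inside that server a strict pattern match on the memcached socket reply hits {error,timeout} and raises {badmatch,{error,timeout}}, so the orchestrator's call fails and the rebalance exits. A minimal sketch of that call pattern, reconstructed from the crash reports rather than taken from the ns_server source (the function name and the "ns_memcached-" ++ Bucket name derivation are assumptions; the registered name, message, and timeout appear verbatim in the exit reason above):

    %% Hypothetical reconstruction of the failing call path.
    %% Ask the per-bucket ns_memcached server on Node to delete one vbucket,
    %% waiting at most 30 s for a reply (the 30000 in the exit reason).
    delete_vbucket(Bucket, Node, VBucket) ->
        Server = list_to_atom("ns_memcached-" ++ Bucket),
        gen_server:call({Server, Node}, {delete_vbucket, VBucket}, 30000).

Per the stack trace, the server side does a strict match on the reply from mc_client_binary:delete_vbucket/2 (which reads the socket in cmd_binary_vocal_recv/5); when that read returns {error,timeout}, the match raises the {badmatch,{error,timeout}} exit logged here, and the caller's 30 s timeout then surfaces as the rebalance failure reason.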
logs_node: -------------------------------

INFO REPORT <0.61.0> 2011-01-03 12:53:08
===============================================================================
ns_1@10.2.1.100:log_os_info:25: OS type: {win32,nt} Version: {6,1,7600}
Runtime info: [{otp_release,"R13B03"},
 {erl_version,"5.7.4"},
 {erl_version_long,"Erlang R13B03 (erts-5.7.4) [smp:4:4] [rq:4] [async-threads:16]\n"},
 {system_arch_raw,"win32"},
 {system_arch,"windows"},
 {localtime,{{2011,1,3},{12,53,8}}},
 {memory,[{total,4660064},{processes,775796},{processes_used,771348},{system,3884268},{atom,308853},{atom_used,284695},{binary,12920},{code,2047049},{ets,157684}]},
 {loaded,[ns_info,log_os_info,ns_log_mf_h,ns_server_cluster_sup,ns_server,timer,io_lib_fread,memsup,disksup,os_mon_sysinfo,os_mon,unicode,io_lib_pretty,io_lib_format,io_lib,io,sasl_report,release_handler,calendar,overload,sets,alarm_handler,ordsets,sasl_report_tty_h,erl_lint,sasl,ram_file,beam_lib,ns_bootstrap,file_io_server,orddict,erl_eval,file,c,error_logger_tty_h,kernel_config,queue,shell,user,user_drv,user_sup,supervisor_bridge,standard_error,ets,gb_sets,hipe_unified_loader,packages,code_server,code,file_server,global_group,gen_tcp,inet_tcp,auth,erl_epmd,inet_tcp_dist,net_kernel,erl_distribution,filename,win32reg,inet_parse,inet,inet_udp,os,inet_config,inet_db,global,gb_trees,rpc,dict,supervisor,kernel,application_master,sys,application,gen_server,erl_parse,proplists,erl_scan,lists,application_controller,proc_lib,gen,gen_event,error_logger,heart,error_handler,erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init,otp_ring0]},
 {applications,[{os_mon,"CPO CXC 138 46","2.2.4"},{kernel,"ERTS CXC 138 10","2.13.4"},{sasl,"SASL CXC 138 11","2.1.8"},{ns_server,"Membase server","1.6.5r"},{menelaus,"Membase menelaus","1.6.5r"},{stdlib,"ERTS CXC 138 10","1.16.4"}]},
 {pre_loaded,[erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init,otp_ring0]},
 {process_count,50},
 {node,'ns_1@10.2.1.100'},
 {nodes,[]},
 {registered,[auth,erl_prim_loader,code_server,application_controller,standard_error,init,timer_server,kernel_safe_sup,sasl_sup,rex,inet_db,net_sup,release_handler,kernel_sup,user,global_name_server,overload,file_server_2,error_logger,alarm_handler,net_kernel,os_mon_sup,standard_error_sup,global_group,sasl_safe_sup,os_mon_sysinfo,memsup,ns_server_cluster_sup,disksup,erl_epmd]},
 {cookie,nocookie},
 {wordsize,4},
 {wall_clock,0}]

INFO REPORT <0.62.0> 2011-01-03 12:53:08
===============================================================================
reading ip config from "priv/ip"

PROGRESS REPORT <0.61.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_server_cluster_sup} started [{pid,<0.62.0>}, {name,dist_manager}, {mfa,{dist_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

PROGRESS REPORT <0.61.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_server_cluster_sup} started [{pid,<0.63.0>}, {name,ns_cluster}, {mfa,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]

INFO REPORT <0.64.0> 2011-01-03 12:53:08
===============================================================================
loading config from "priv/config"

PROGRESS REPORT <0.64.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_config_sup} started [{pid,<0.65.0>}, {name,ns_config_events}, {mfa,{gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

PROGRESS REPORT <0.64.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_config_sup} started [{pid,<0.66.0>}, {name,ns_config}, {mfa,{ns_config,start_link,["priv/config",ns_config_default]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

PROGRESS REPORT <0.64.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_config_sup} started [{pid,<0.68.0>}, {name,ns_config_isasl_sync}, {mfa,{ns_config_isasl_sync,start_link,[]}}, {restart_type,transient}, {shutdown,10}, {child_type,worker}]

INFO REPORT <0.65.0> 2011-01-03 12:53:08
===============================================================================
isasl_sync init: ["c:/Program Files/Membase/Server/data/ns_1/isasl.pw", "_admin","_admin"]

INFO REPORT <0.65.0> 2011-01-03 12:53:08
===============================================================================
isasl_sync init buckets: []

INFO REPORT <0.65.0> 2011-01-03 12:53:08
===============================================================================
Writing isasl passwd file: "c:/Program Files/Membase/Server/data/ns_1/isasl.pw"

PROGRESS REPORT <0.64.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_config_sup} started [{pid,<0.70.0>}, {name,ns_config_log}, {mfa,{ns_config_log,start_link,[]}}, {restart_type,transient}, {shutdown,10}, {child_type,worker}]

PROGRESS REPORT <0.61.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_server_cluster_sup} started [{pid,<0.64.0>}, {name,ns_config_sup}, {mfa,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

INFO REPORT <0.72.0> 2011-01-03 12:53:08
===============================================================================
ns_log:init(): Couldn't load logs from "c:/Program Files/Membase/Server/data/ns_1/ns_log": {error,enoent}

PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<0.72.0>}, {name,ns_log}, {mfa,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<0.73.0>}, {name,ns_log_events}, {mfa,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

PROGRESS REPORT <0.74.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_mail_sup} started [{pid,<0.75.0>}, {name,ns_mail}, {mfa,{ns_mail,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

PROGRESS REPORT <0.74.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_mail_sup} started [{pid,<0.76.0>}, {name,ns_mail_log}, {mfa,{ns_mail_log,start_link,[]}}, {restart_type,transient}, {shutdown,10}, {child_type,worker}]

PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<0.74.0>}, {name,ns_mail_sup}, {mfa,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <0.77.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<0.78.0>}, {name,ns_node_disco_events}, {mfa,{gen_event,start_link,[{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

INFO REPORT <0.79.0> 2011-01-03 12:53:08
===============================================================================
Initting ns_node_disco with []

INFO REPORT <0.80.0> 2011-01-03 12:53:08
===============================================================================
ns_node_disco cookie_sync

INFO REPORT <0.80.0> 2011-01-03 12:53:08
===============================================================================
ns_log: logging ns_node_disco:3:Initial otp cookie generated: pmqchiglstnppkwf

INFO REPORT <0.65.0> 2011-01-03 12:53:08
===============================================================================
config change: otp -> [{cookie,pmqchiglstnppkwf}]

INFO REPORT <0.80.0> 2011-01-03 12:53:08
===============================================================================
ns_node_disco: nodes_wanted updated: ['ns_1@10.2.1.100'], with cookie: pmqchiglstnppkwf

INFO REPORT <0.80.0> 2011-01-03 12:53:08
===============================================================================
ns_node_disco: nodes_wanted pong: ['ns_1@10.2.1.100'], with cookie: pmqchiglstnppkwf

PROGRESS REPORT <0.77.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<0.79.0>}, {name,ns_node_disco}, {mfa,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

PROGRESS REPORT <0.77.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<0.83.0>}, {name,ns_node_disco_log}, {mfa,{ns_node_disco_log,start_link,[]}}, {restart_type,transient}, {shutdown,10}, {child_type,worker}]

PROGRESS REPORT <0.77.0> 2011-01-03 12:53:08
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<0.84.0>}, {name,ns_node_disco_conf_events}, {mfa,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,transient}, {shutdown,10}, {child_type,worker}]

INFO REPORT <0.85.0> 2011-01-03 12:53:08
===============================================================================
ns_config_rep init pulling

INFO REPORT <0.85.0> 2011-01-03 12:53:08
===============================================================================
ns_config_rep init pushing

INFO REPORT <0.85.0> 2011-01-03 12:53:08
===============================================================================
ns_config_rep init reannouncing

INFO REPORT <0.65.0> 2011-01-03 12:53:08
===============================================================================
ns_node_disco_conf_events config on otp

INFO REPORT <0.65.0> 2011-01-03 12:53:08
===============================================================================
config change: otp -> [{cookie,pmqchiglstnppkwf}]

INFO REPORT <0.65.0> 2011-01-03 12:53:08
===============================================================================
config change: alerts -> [{email,[]}, {email_alerts,false}, [{user,undefined}, {pass,"********"},
{addr,undefined}, {port,undefined}, {encrypt,false}], {alerts,[server_down,server_unresponsive,server_up,server_joined,server_left, bucket_created,bucket_deleted,bucket_auth_failed]}] INFO REPORT <0.65.0> 2011-01-03 12:53:08 =============================================================================== config change: buckets -> [{configs,[]}] INFO REPORT <0.65.0> 2011-01-03 12:53:08 =============================================================================== config change: memory_quota -> 3268 INFO REPORT <0.87.0> 2011-01-03 12:53:08 =============================================================================== ns_node_disco cookie_sync INFO REPORT <0.65.0> 2011-01-03 12:53:08 =============================================================================== config change: moxi -> [{port,11211},{verbosity,[]}] INFO REPORT <0.65.0> 2011-01-03 12:53:08 =============================================================================== ns_node_disco_conf_events config on nodes_wanted INFO REPORT <0.65.0> 2011-01-03 12:53:08 =============================================================================== config change: nodes_wanted -> ['ns_1@10.2.1.100'] INFO REPORT <0.88.0> 2011-01-03 12:53:08 =============================================================================== ns_node_disco cookie_sync INFO REPORT <0.87.0> 2011-01-03 12:53:08 =============================================================================== ns_node_disco: nodes_wanted updated: ['ns_1@10.2.1.100'], with cookie: pmqchiglstnppkwf INFO REPORT <0.65.0> 2011-01-03 12:53:08 =============================================================================== config change: port_servers -> [{moxi,"./bin/moxi/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,stderr_to_stdout,stream]}, {memcached,"./bin/memcached/memcached", ["-X","./bin/memcached/stdin_term_handler.so","-p", {"~B",[port]}, "-E","./bin/bucket_engine/bucket_engine.so","-B","binary","-r", "-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,stream]}] INFO REPORT <0.65.0> 2011-01-03 12:53:08 =============================================================================== config change: replication -> [{enabled,true}] INFO REPORT <0.65.0> 2011-01-03 12:53:08 =============================================================================== config change: rest -> [{port,8091}] INFO REPORT <0.65.0> 2011-01-03 12:53:08 =============================================================================== config change: rest_creds -> ******** INFO REPORT <0.65.0> 2011-01-03 12:53:08 =============================================================================== config change: {node,'ns_1@10.2.1.100',isasl} -> [{path,"c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}] INFO REPORT <0.65.0> 2011-01-03 12:53:08 
=============================================================================== config change: {node,'ns_1@10.2.1.100',membership} -> active INFO REPORT <0.88.0> 2011-01-03 12:53:08 =============================================================================== ns_node_disco: nodes_wanted updated: ['ns_1@10.2.1.100'], with cookie: pmqchiglstnppkwf INFO REPORT <0.87.0> 2011-01-03 12:53:08 =============================================================================== ns_node_disco: nodes_wanted pong: ['ns_1@10.2.1.100'], with cookie: pmqchiglstnppkwf INFO REPORT <0.65.0> 2011-01-03 12:53:08 =============================================================================== config change: {node,'ns_1@10.2.1.100',memcached} -> [{port,11210}, {dbdir,"c:/Program Files/Membase/Server/data/ns_1"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"./bin/bucket_engine/bucket_engine.so"}, {engines,[{membase,[{engine,"bin/ep_engine/ep.so"}, {initfile,"priv/init.sql"}]}, {memcached,[{engine,"bin/memcached/default_engine.so"}]}]}, {verbosity,[]}] INFO REPORT <0.65.0> 2011-01-03 12:53:08 =============================================================================== config change: {node,'ns_1@10.2.1.100',ns_log} -> [{filename,"c:/Program Files/Membase/Server/data/ns_1/ns_log"}] INFO REPORT <0.65.0> 2011-01-03 12:53:08 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.88.0> 2011-01-03 12:53:08 =============================================================================== ns_node_disco: nodes_wanted pong: ['ns_1@10.2.1.100'], with cookie: pmqchiglstnppkwf INFO REPORT <0.85.0> 2011-01-03 12:53:08 =============================================================================== Pushing config PROGRESS REPORT <0.77.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_node_disco_sup} started [{pid,<0.85.0>}, {name,ns_config_rep}, {mfa,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.77.0>}, {name,ns_node_disco_sup}, {mfa,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] INFO REPORT <0.85.0> 2011-01-03 12:53:08 =============================================================================== Pushing config done PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.90.0>}, {name,ns_heart}, {mfa,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.93.0>}, {name,ns_doctor}, {mfa,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] INFO REPORT <0.93.0> 2011-01-03 12:53:08 =============================================================================== ns_1@10.2.1.100:ns_doctor:78: Got initial status [{'ns_1@10.2.1.100', [{last_heard, {1294,87988,399006}}, {active_buckets,[]}, {memory, [{total,5381712}, {processes,1128076}, {processes_used,1122692}, {system,4253636}, {atom,332957}, {atom_used,321142}, {binary,34224}, 
{code,2347836}, {ets,177564}]}, {cluster_compatibility_version, 1}, {version, [{os_mon,"2.2.4"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,0}, {memory_data, {4284698624,644468736, {<0.7.0>,98440}}}, {disk_data, [{"C:\\",48162864,49}, {"D:\\",51279476,0}, {"G:\\",34724465,17}]}, {replication,[]}, {system_memory_data, [{total_memory, 4284698624}, {free_memory,3638571008}, {system_total_memory, 4284698624}]}, {statistics, [{wall_clock,{390,47}}, {context_switches, {4991,0}}, {garbage_collection, {869,1702021,0}}, {io, {{input,2440237}, {output,118455}}}, {reductions, {866815,866815}}, {run_queue,0}, {runtime,{234,234}}]}]}] PROGRESS REPORT <0.95.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.96.0>}, {name,menelaus_web}, {mfa,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] PROGRESS REPORT <0.95.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.98.0>}, {name,menelaus_event}, {mfa,{menelaus_event,start_link,[]}}, {restart_type,transient}, {shutdown,5000}, {child_type,worker}] PROGRESS REPORT <0.95.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.99.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] INFO REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== ns_log: logging menelaus_app:1:Membase Server has started on web port 8091 on node 'ns_1@10.2.1.100'. 
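[The PROGRESS REPORT entries above and below are SASL announcing OTP supervisor children as they start: the {name, mfa, restart_type, shutdown, child_type} fields echo the classic child-spec tuple {Id, {M,F,A}, Restart, Shutdown, Type, Modules}. A minimal sketch of a supervisor that would emit reports of this shape follows; example_sup and example_worker are illustrative names only, not ns_server modules.]

%% Minimal R13-era OTP supervisor sketch. example_sup, example_worker and
%% example_subsup are hypothetical names, not part of ns_server.
-module(example_sup).
-behaviour(supervisor).
-export([start_link/0, init/1]).

start_link() ->
    %% Registers locally, as in the {local,ns_server_sup} reports above.
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).

init([]) ->
    %% {Strategy, MaxRestarts, MaxSeconds}: restart a crashed child in place.
    {ok, {{one_for_one, 3, 10},
          %% {Id, StartFunc, Restart, Shutdown, Type, Modules} -- the same
          %% fields SASL prints as name/mfa/restart_type/shutdown/child_type.
          [{example_worker,
            {example_worker, start_link, []},
            permanent, 10, worker, [example_worker]},
           {example_subsup,
            {example_subsup, start_link, []},
            permanent, infinity, supervisor, [example_subsup]}]}}.

[When a child's start or runtime call fails, the same machinery emits the SUPERVISOR REPORT/CRASH REPORT pairs seen further down for hot_keys_keeper and 'ns_memcached-default'; permanent children are restarted, which is why hot_keys_keeper reappears under a new pid after each noproc error. The interleaved "Connection attempt from disallowed node" errors are the Erlang distribution layer refusing 'ns_1@10.2.1.102', consistent with that node still retrying with credentials from before this node generated its fresh otp cookie at startup.]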
PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.95.0>}, {name,menelaus}, {mfa,{menelaus_app,start_subapp,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.100.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_port_sup} started [{pid,<0.101.0>}, {name,ns_port_init}, {mfa,{ns_port_init,start_link,[]}}, {restart_type,transient}, {shutdown,10}, {child_type,worker}] INFO REPORT <0.102.0> 2011-01-03 12:53:08 =============================================================================== starting ns_port_server with delay of 5000 PROGRESS REPORT <0.100.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_port_sup} started [{pid,<0.102.0>}, {name, {moxi,"./bin/moxi/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,stderr_to_stdout,stream]}}, {mfa, {supervisor_cushion,start_link, [moxi,5000,ns_port_server,start_link, [moxi,"./bin/moxi/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] INFO REPORT <0.104.0> 2011-01-03 12:53:08 =============================================================================== starting ns_port_server with delay of 5000 PROGRESS REPORT <0.100.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_port_sup} started [{pid,<0.104.0>}, {name, {memcached,"./bin/memcached/memcached", ["-X","./bin/memcached/stdin_term_handler.so","-p","11210", "-E","./bin/bucket_engine/bucket_engine.so","-B","binary", "-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,stream]}}, {mfa, {supervisor_cushion,start_link, [memcached,5000,ns_port_server,start_link, [memcached,"./bin/memcached/memcached", ["-X","./bin/memcached/stdin_term_handler.so","-p", "11210","-E","./bin/bucket_engine/bucket_engine.so","-B", "binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,stream]]]}}, {restart_type,permanent}, 
{shutdown,10}, {child_type,worker}] PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.100.0>}, {name,ns_port_sup}, {mfa,{ns_port_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.106.0>}, {name,ns_tick_event}, {mfa,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.107.0>}, {name,ns_stats_event}, {mfa,{gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.108.0>}, {name,ns_good_bucket_worker}, {mfa,{work_queue,start_link,[ns_good_bucket_worker]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.109.0>}, {name,ns_good_bucket_sup}, {mfa,{ns_bucket_sup,start_link, [ns_good_bucket_sup, #Fun, ns_good_bucket_worker]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] INFO REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== ns_1@10.2.1.100:misc:725: start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.110.0> on 'ns_1@10.2.1.100' PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.110.0>}, {name,ns_orchestrator}, {mfa,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] PROGRESS REPORT <0.115.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,mnesia_sup} started [{pid,<0.116.0>}, {name,mnesia_event}, {mfa,{mnesia_sup,start_event,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] PROGRESS REPORT <0.117.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.118.0>}, {name,mnesia_monitor}, {mfa,{mnesia_monitor,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <0.117.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.119.0>}, {name,mnesia_subscr}, {mfa,{mnesia_subscr,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <0.117.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.120.0>}, {name,mnesia_locker}, {mfa,{mnesia_locker,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <0.117.0> 2011-01-03 12:53:08 
=============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.121.0>}, {name,mnesia_recover}, {mfa,{mnesia_recover,start,[]}}, {restart_type,permanent}, {shutdown,180000}, {child_type,worker}] PROGRESS REPORT <0.117.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.122.0>}, {name,mnesia_tm}, {mfa,{mnesia_tm,start,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] PROGRESS REPORT <0.117.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.123.0>}, {name,mnesia_checkpoint_sup}, {mfa,{mnesia_checkpoint_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.117.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.124.0>}, {name,mnesia_snmp_sup}, {mfa,{mnesia_snmp_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.117.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.125.0>}, {name,mnesia_controller}, {mfa,{mnesia_controller,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <0.117.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.126.0>}, {name,mnesia_late_loader}, {mfa,{mnesia_late_loader,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <0.115.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,mnesia_sup} started [{pid,<0.117.0>}, {name,mnesia_kernel_sup}, {mfa,{mnesia_kernel_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.7.0> 2011-01-03 12:53:08 =============================================================================== application mnesia started_at 'ns_1@10.2.1.100' PROGRESS REPORT <0.34.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,kernel_safe_sup} started [{pid,<0.129.0>}, {name,disk_log_sup}, {mfa,{disk_log_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] PROGRESS REPORT <0.34.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,kernel_safe_sup} started [{pid,<0.130.0>}, {name,disk_log_server}, {mfa,{disk_log_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] PROGRESS REPORT <0.34.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,kernel_safe_sup} started [{pid,<0.134.0>}, {name,dets_sup}, {mfa,{dets_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] PROGRESS REPORT <0.34.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,kernel_safe_sup} started [{pid,<0.135.0>}, {name,dets}, {mfa,{dets_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, 
{child_type,worker}] INFO REPORT <0.111.0> 2011-01-03 12:53:08 =============================================================================== ns_1@10.2.1.100:ns_mnesia:250: Committed schema to disk. INFO REPORT <0.111.0> 2011-01-03 12:53:08 =============================================================================== ns_1@10.2.1.100:ns_mnesia:196: Current config: [{access_module,mnesia}, {auto_repair,true}, {backup_module,mnesia_backup}, {checkpoints,[]}, {db_nodes,['ns_1@10.2.1.100']}, {debug,verbose}, {directory, "c:/Program Files/Membase/Server/Mnesia.ns_1@10.2.1.100"}, {dump_log_load_regulation, false}, {dump_log_time_threshold, 180000}, {dump_log_update_in_place, true}, {dump_log_write_threshold, 1000}, {embedded_mnemosyne,false}, {event_module,mnesia_event}, {extra_db_nodes,[]}, {fallback_activated,false}, {held_locks,[]}, {ignore_fallback_at_startup, false}, {fallback_error_function, {mnesia,lkill}}, {is_running,yes}, {local_tables,[schema]}, {lock_queue,[]}, {log_version,"4.3"}, {master_node_tables,[]}, {max_wait_for_decision,10000}, {protocol_version,{7,6}}, {running_db_nodes, ['ns_1@10.2.1.100']}, {schema_location,opt_disc}, {schema_version,{3,0}}, {subscribers, [<0.116.0>,<0.111.0>]}, {tables,[schema]}, {transaction_commits,3}, {transaction_failures,0}, {transaction_log_writes,1}, {transaction_restarts,0}, {transactions,[]}, {use_dir,true}, {core_dir,false}, {no_table_loaders,2}, {dc_dump_limit,4}, {version,"4.4.12"}] INFO REPORT <0.111.0> 2011-01-03 12:53:08 =============================================================================== ns_1@10.2.1.100:ns_mnesia:144: Info from Mnesia: Create Directory "c:/Program Files/Membase/Server/Mnesia.ns_1@10.2.1.100" INFO REPORT <0.111.0> 2011-01-03 12:53:08 =============================================================================== ns_1@10.2.1.100:ns_mnesia:170: Mnesia table event: {write,schema, {schema,schema, [{name,schema}, {type,set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,false}, {record_name,schema}, {attributes,[table,cstruct]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,87988,524000},'ns_1@10.2.1.100'}}, {version,{{3,0},{'ns_1@10.2.1.100',{1294,87988,571001}}}}]}, [{schema,schema, [{name,schema}, {type,set}, {ram_copies,[]}, {disc_copies,[]}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,false}, {record_name,schema}, {attributes,[table,cstruct]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,87988,524000},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}], {tid,3,<0.128.0>}} PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.111.0>}, {name,ns_mnesia}, {mfa,{ns_mnesia,start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.149.0>}, {name,ns_bad_bucket_worker}, {mfa,{work_queue,start_link,[ns_bad_bucket_worker]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.150.0>}, {name,ns_bad_bucket_sup}, {mfa,{ns_bucket_sup,start_link, 
[ns_bad_bucket_sup, #Fun, ns_bad_bucket_worker]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.151.0>}, {name,ns_moxi_sup}, {mfa,{ns_moxi_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] INFO REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== ns_1@10.2.1.100:misc:725: start_singleton(gen_server, ns_tick, [], []): started as <0.152.0> on 'ns_1@10.2.1.100' PROGRESS REPORT <0.71.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.152.0>}, {name,ns_tick}, {mfa,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] PROGRESS REPORT <0.61.0> 2011-01-03 12:53:08 =============================================================================== supervisor {local,ns_server_cluster_sup} started [{pid,<0.71.0>}, {name,ns_server_sup}, {mfa,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.7.0> 2011-01-03 12:53:08 =============================================================================== application ns_server started_at 'ns_1@10.2.1.100' ERROR REPORT <0.147.0> 2011-01-03 12:53:08 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.153.0> 2011-01-03 12:53:11 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.161.0> 2011-01-03 12:53:19 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.163.0> 2011-01-03 12:53:22 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.167.0> 2011-01-03 12:53:24 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.169.0> 2011-01-03 12:53:26 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.175.0> 2011-01-03 12:53:28 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.183.0> 2011-01-03 12:53:39 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.185.0> 2011-01-03 12:53:42 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.189.0> 2011-01-03 12:53:44 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.191.0> 2011-01-03 12:53:46 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.197.0> 2011-01-03 
12:53:48 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.199.0> 2011-01-03 12:53:49 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.207.0> 2011-01-03 12:53:59 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.209.0> 2011-01-03 12:54:02 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.213.0> 2011-01-03 12:54:04 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.215.0> 2011-01-03 12:54:06 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.93.0> 2011-01-03 12:54:08 =============================================================================== ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,88047,405401}}, {active_buckets,[]}, {memory, [{total,8509368}, {processes,2382660}, {processes_used,2374492}, {system,6126708}, {atom,513069}, {atom_used,485883}, {binary,52152}, {code,3941715}, {ets,255620}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,59}, {memory_data,{4284698624,644468736,{<0.7.0>,98440}}}, {disk_data, [{"C:\\",48162864,49},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,3678248960}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{50395,0}}, {context_switches,{10344,0}}, {garbage_collection,{1677,3546978,0}}, {io,{{input,4173093},{output,235503}}}, {reductions,{1616414,9260}}, {run_queue,0}, {runtime,{514,0}}]}]}] ERROR REPORT <0.223.0> 2011-01-03 12:54:08 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.235.0> 2011-01-03 12:54:19 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.65.0> 2011-01-03 12:54:19 =============================================================================== config change: {node,'ns_1@10.2.1.100',memcached} -> [{dbdir,"c:/Program Files/Membase/Server/data/ns_1"}, {port,11210}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"./bin/bucket_engine/bucket_engine.so"}, {engines,[{membase,[{engine,"bin/ep_engine/ep.so"}, {initfile,"priv/init.sql"}]}, {memcached,[{engine,"bin/memcached/default_engine.so"}]}]}, {verbosity,[]}] INFO REPORT <0.65.0> 2011-01-03 12:54:19 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:54:19 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:54:19 =============================================================================== Pushing config done INFO REPORT <0.65.0> 
2011-01-03 12:54:19 =============================================================================== config change: memory_quota -> 3268 INFO REPORT <0.65.0> 2011-01-03 12:54:19 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:54:19 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:54:19 =============================================================================== Pushing config done ERROR REPORT <0.239.0> 2011-01-03 12:54:21 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.241.0> 2011-01-03 12:54:22 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.245.0> 2011-01-03 12:54:23 =============================================================================== ns_1@10.2.1.100:ns_storage_conf:273: Result of deleting file "c:/Program Files/Membase/Server/data/ns_1/default": {error, enoent} INFO REPORT <0.245.0> 2011-01-03 12:54:23 =============================================================================== ns_1@10.2.1.100:ns_storage_conf:273: Result of deleting file "c:/Program Files/Membase/Server/data/ns_1/default-0.mb": {error, enoent} INFO REPORT <0.245.0> 2011-01-03 12:54:23 =============================================================================== ns_1@10.2.1.100:ns_storage_conf:273: Result of deleting file "c:/Program Files/Membase/Server/data/ns_1/default-1.mb": {error, enoent} INFO REPORT <0.245.0> 2011-01-03 12:54:23 =============================================================================== ns_1@10.2.1.100:ns_storage_conf:273: Result of deleting file "c:/Program Files/Membase/Server/data/ns_1/default-2.mb": {error, enoent} INFO REPORT <0.245.0> 2011-01-03 12:54:23 =============================================================================== ns_1@10.2.1.100:ns_storage_conf:273: Result of deleting file "c:/Program Files/Membase/Server/data/ns_1/default-3.mb": {error, enoent} INFO REPORT <0.133.0> 2011-01-03 12:54:23 =============================================================================== ns_log: logging menelaus_web:12:Created bucket "default" of type: membase INFO REPORT <0.65.0> 2011-01-03 12:54:23 =============================================================================== config change: buckets -> [{configs,[{"default", [{num_replicas,1}, {ram_quota,3426746368}, {auth_type,sasl}, {sasl_password,[]}, {type,membase}, {num_vbuckets,1024}, {ht_size,3079}, {tap_keepalive,0}, {tap_noop_interval,20}, {max_txn_size,1000}, {ht_locks,5}, {servers,[]}, {map,undefined}]}]}] INFO REPORT <0.65.0> 2011-01-03 12:54:23 =============================================================================== Writing isasl passwd file: "c:/Program Files/Membase/Server/data/ns_1/isasl.pw" INFO REPORT <0.65.0> 2011-01-03 12:54:23 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:54:23 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:54:23 =============================================================================== Pushing config done ERROR REPORT <0.248.0> 2011-01-03 12:54:24 
=============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.103.0> 2011-01-03 12:54:24 =============================================================================== moxi<0.103.0>: 2011-01-03 12:55:40: (agent_config.c.650 ERROR REPORT <0.250.0> 2011-01-03 12:54:25 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** SUPERVISOR REPORT <0.95.0> 2011-01-03 12:54:28 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.99.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:54:28 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.255.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] INFO REPORT <0.149.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_bucket_sup:70: ns_bad_bucket_sup: Starting new child: {{stats_collector, "default"}, {stats_collector, start_link, ["default"]}, permanent, 10, worker, [stats_collector]} INFO REPORT <0.108.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_bucket_sup:70: ns_good_bucket_sup: Starting new child: {{ns_vbm_sup, "default"}, {ns_vbm_sup, start_link, ["default"]}, permanent, 1000, worker, [ns_vbm_sup]} INFO REPORT <0.65.0> 2011-01-03 12:54:28 =============================================================================== config change: buckets -> [{configs,[{"default", [{num_replicas,1}, {ram_quota,3426746368}, {auth_type,sasl}, {sasl_password,[]}, {type,membase}, {num_vbuckets,1024}, {ht_size,3079}, {tap_keepalive,0}, {tap_noop_interval,20}, {max_txn_size,1000}, {ht_locks,5}, {servers,['ns_1@10.2.1.100']}, {map,undefined}]}]}] INFO REPORT <0.65.0> 2011-01-03 12:54:28 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:54:28 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:54:28 =============================================================================== Pushing config done INFO REPORT <0.65.0> 2011-01-03 12:54:28 =============================================================================== config change: buckets -> [{configs,[{"default", [{num_replicas,1}, {ram_quota,3426746368}, {auth_type,sasl}, {sasl_password,[]}, {type,membase}, {num_vbuckets,1024}, {ht_size,3079}, {tap_keepalive,0}, {tap_noop_interval,20}, {max_txn_size,1000}, {ht_locks,5}, {servers,['ns_1@10.2.1.100']}, {map,[['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], 
['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100'|...], [...]|...]}]}]}] INFO REPORT <0.65.0> 2011-01-03 12:54:28 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:54:28 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:54:28 =============================================================================== Pushing config done PROGRESS REPORT <0.150.0> 2011-01-03 12:54:28 =============================================================================== supervisor {local,ns_bad_bucket_sup} started [{pid,<0.259.0>}, {name,{stats_collector,"default"}}, {mfa,{stats_collector,start_link,["default"]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] INFO REPORT <0.149.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_bucket_sup:70: ns_bad_bucket_sup: Starting new child: {{stats_archiver, "default"}, {stats_archiver, start_link, ["default"]}, permanent, 10, worker, [stats_archiver]} PROGRESS REPORT <0.109.0> 2011-01-03 12:54:28 =============================================================================== supervisor {local,ns_good_bucket_sup} started [{pid,<0.260.0>}, {name,{ns_vbm_sup,"default"}}, {mfa,{ns_vbm_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] INFO REPORT <0.108.0> 2011-01-03 12:54:28 =============================================================================== 
ns_1@10.2.1.100:ns_bucket_sup:70: ns_good_bucket_sup: Starting new child: {{ns_memcached, "default"}, {ns_memcached, start_link, ["default"]}, permanent, 86400000, worker, [ns_memcached]} PROGRESS REPORT <0.109.0> 2011-01-03 12:54:28 =============================================================================== supervisor {local,ns_good_bucket_sup} started [{pid,<0.261.0>}, {name,{ns_memcached,"default"}}, {mfa,{ns_memcached,start_link,["default"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:120: Created table 'stats_archiver-default-minute' INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:170: Mnesia table event: {write,schema, {schema,'stats_archiver-default-minute', [{name,'stats_archiver-default-minute'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,465401},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}, [], {tid,4,<0.263.0>}} INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:170: Mnesia table event: {write,schema, {schema,'stats_archiver-default-minute', [{name,'stats_archiver-default-minute'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,465401},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}, [{schema,'stats_archiver-default-minute', [{name,'stats_archiver-default-minute'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,465401},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}], {tid,4,<0.263.0>}} INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:120: Created table 'stats_archiver-default-hour' INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:170: Mnesia table event: {write,schema, {schema,'stats_archiver-default-hour', [{name,'stats_archiver-default-hour'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,512401},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}, [], {tid,5,<0.269.0>}} INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:170: Mnesia table event: {write,schema, {schema,'stats_archiver-default-hour', 
[{name,'stats_archiver-default-hour'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,512401},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}, [{schema,'stats_archiver-default-hour', [{name,'stats_archiver-default-hour'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,512401},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}], {tid,5,<0.269.0>}} INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:120: Created table 'stats_archiver-default-day' INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:170: Mnesia table event: {write,schema, {schema,'stats_archiver-default-day', [{name,'stats_archiver-default-day'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,543403},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}, [], {tid,6,<0.275.0>}} INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:170: Mnesia table event: {write,schema, {schema,'stats_archiver-default-day', [{name,'stats_archiver-default-day'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,543403},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}, [{schema,'stats_archiver-default-day', [{name,'stats_archiver-default-day'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,543403},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}], {tid,6,<0.275.0>}} INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:120: Created table 'stats_archiver-default-week' INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:170: Mnesia table event: {write,schema, {schema,'stats_archiver-default-week', [{name,'stats_archiver-default-week'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, 
{cookie,{{1294,88068,605402},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}, [], {tid,7,<0.281.0>}} INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:170: Mnesia table event: {write,schema, {schema,'stats_archiver-default-week', [{name,'stats_archiver-default-week'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,605402},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}, [{schema,'stats_archiver-default-week', [{name,'stats_archiver-default-week'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,605402},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}], {tid,7,<0.281.0>}} INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:120: Created table 'stats_archiver-default-month' INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:170: Mnesia table event: {write,schema, {schema,'stats_archiver-default-month', [{name,'stats_archiver-default-month'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,652402},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}, [], {tid,8,<0.287.0>}} INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:170: Mnesia table event: {write,schema, {schema,'stats_archiver-default-month', [{name,'stats_archiver-default-month'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,652402},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}, [{schema,'stats_archiver-default-month', [{name,'stats_archiver-default-month'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,652402},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}], {tid,8,<0.287.0>}} INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:120: Created table 'stats_archiver-default-year' INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:170: Mnesia table event: {write,schema, {schema,'stats_archiver-default-year', 
[{name,'stats_archiver-default-year'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,715402},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}, [], {tid,9,<0.293.0>}} INFO REPORT <0.111.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_mnesia:170: Mnesia table event: {write,schema, {schema,'stats_archiver-default-year', [{name,'stats_archiver-default-year'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,715402},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}, [{schema,'stats_archiver-default-year', [{name,'stats_archiver-default-year'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.2.1.100']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1294,88068,715402},'ns_1@10.2.1.100'}}, {version,{{2,0},[]}}]}], {tid,9,<0.293.0>}} PROGRESS REPORT <0.150.0> 2011-01-03 12:54:28 =============================================================================== supervisor {local,ns_bad_bucket_sup} started [{pid,<0.262.0>}, {name,{stats_archiver,"default"}}, {mfa,{stats_archiver,start_link,["default"]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] INFO REPORT <0.149.0> 2011-01-03 12:54:28 =============================================================================== ns_1@10.2.1.100:ns_bucket_sup:70: ns_bad_bucket_sup: Starting new child: {{stats_reader, "default"}, {stats_reader, start_link, ["default"]}, permanent, 10, worker, [stats_reader]} PROGRESS REPORT <0.150.0> 2011-01-03 12:54:28 =============================================================================== supervisor {local,ns_bad_bucket_sup} started [{pid,<0.299.0>}, {name,{stats_reader,"default"}}, {mfa,{stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] INFO REPORT <0.65.0> 2011-01-03 12:54:29 =============================================================================== unsupervising port: {moxi,"./bin/moxi/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,stderr_to_stdout,stream]} INFO REPORT <0.65.0> 2011-01-03 12:54:29 =============================================================================== supervising port: {moxi,"./bin/moxi/moxi", ["-Z", 
"port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","j4958ph"}]}, use_stdio,stderr_to_stdout,stream]} INFO REPORT <0.300.0> 2011-01-03 12:54:29 =============================================================================== starting ns_port_server with delay of 5000 INFO REPORT <0.97.0> 2011-01-03 12:54:29 =============================================================================== menelaus_web streaming socket closed by client PROGRESS REPORT <0.100.0> 2011-01-03 12:54:29 =============================================================================== supervisor {local,ns_port_sup} started [{pid,<0.300.0>}, {name, {moxi,"./bin/moxi/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","j4958ph"}]}, use_stdio,stderr_to_stdout,stream]}}, {mfa, {supervisor_cushion,start_link, [moxi,5000,ns_port_server,start_link, [moxi,"./bin/moxi/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","j4958ph"}]}, use_stdio,stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] INFO REPORT <0.65.0> 2011-01-03 12:54:29 =============================================================================== config change: rest -> [{port,8091}] INFO REPORT <0.65.0> 2011-01-03 12:54:29 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:54:29 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:54:29 =============================================================================== Pushing config done INFO REPORT <0.65.0> 2011-01-03 12:54:29 =============================================================================== config change: rest_creds -> ******** INFO REPORT <0.65.0> 2011-01-03 12:54:29 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:54:29 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:54:29 =============================================================================== Pushing config done INFO REPORT <0.256.0> 2011-01-03 12:54:29 
=============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.305.0> 2011-01-03 12:54:29 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.256.0> 2011-01-03 12:54:30 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] INFO REPORT <0.301.0> 2011-01-03 12:54:30 =============================================================================== moxi<0.301.0>: 2011-01-03 12:54:29: (cproxy_config.c.316) env: MOXI_SASL_PLAIN_USR (13) moxi<0.301.0>: 2011-01-03 12:54:29: (cproxy_config.c.325) env: MOXI_SASL_PLAIN_PWD (7) ERROR REPORT <0.313.0> 2011-01-03 12:54:31 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.256.0> 2011-01-03 12:54:31 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] INFO REPORT <0.256.0> 2011-01-03 12:54:32 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.315.0> 2011-01-03 12:54:32 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** SUPERVISOR REPORT <0.95.0> 2011-01-03 12:54:33 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.255.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:54:33 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.321.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] INFO REPORT <0.256.0> 2011-01-03 12:54:33 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.256.0> 2011-01-03 12:54:33 =============================================================================== ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.100'] CRASH REPORT <0.261.0> 2011-01-03 12:54:33 =============================================================================== Crashing process initial_call {ns_memcached,init,['Argument__1']} pid <0.261.0> registered_name [] error_info {error,{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,create_bucket,4}, {ns_memcached,ensure_bucket,2}, {ns_memcached,init,1}, {proc_lib,init_p_do_apply,3}]} ancestors [ns_good_bucket_sup,ns_server_sup,ns_server_cluster_sup,<0.60.0>] messages [] links [<0.109.0>,#Port<0.3302>] dictionary [] trap_exit false status running heap_size 28657 stack_size 24 reductions 4568 SUPERVISOR REPORT <0.109.0> 2011-01-03 12:54:33 =============================================================================== Reporting supervisor 
{local,ns_good_bucket_sup} Child process errorContext child_terminated reason {badmatch,{error,timeout}} pid <0.261.0> name {ns_memcached,"default"} start_function {ns_memcached,start_link,["default"]} restart_type permanent shutdown 86400000 child_type worker PROGRESS REPORT <0.109.0> 2011-01-03 12:54:33 =============================================================================== supervisor {local,ns_good_bucket_sup} started [{pid,<0.322.0>}, {name,{ns_memcached,"default"}}, {mfa,{ns_memcached,start_link,["default"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] ERROR REPORT <0.323.0> 2011-01-03 12:54:34 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.325.0> 2011-01-03 12:54:35 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** SUPERVISOR REPORT <0.95.0> 2011-01-03 12:54:38 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.321.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:54:38 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.334.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] ERROR REPORT <0.322.0> 2011-01-03 12:54:38 =============================================================================== ns_1@10.2.1.100:ns_memcached:378: Unable to connect: {error, {badmatch, {error,timeout}}}, retrying. 
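
The CRASH REPORT above is the root cause of the restart loop that dominates the rest of this log: ns_memcached's init runs ensure_bucket, which pattern-matches on the reply from the memcached binary-protocol client, and a timed-out socket call returns {error,timeout}, which fails the match and kills the process. A minimal sketch of that idiom (the function body and the argument list of mc_client_binary:create_bucket/4 are assumptions for illustration, not the shipped source):

    %% Matching `ok = ...` turns an {error,timeout} reply into
    %% exit({badmatch,{error,timeout}}); the supervisor logs
    %% child_terminated and restarts the child, as seen above.
    ensure_bucket(Sock, Bucket, Engine, Config) ->
        ok = mc_client_binary:create_bucket(Sock, Bucket, Engine, Config).
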
INFO REPORT <0.333.0> 2011-01-03 12:54:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.338.0> 2011-01-03 12:54:39 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.333.0> 2011-01-03 12:54:40 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.340.0> 2011-01-03 12:54:41 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.333.0> 2011-01-03 12:54:41 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] INFO REPORT <0.333.0> 2011-01-03 12:54:42 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.346.0> 2011-01-03 12:54:42 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** SUPERVISOR REPORT <0.95.0> 2011-01-03 12:54:43 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.334.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:54:43 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.349.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] INFO REPORT <0.333.0> 2011-01-03 12:54:43 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.333.0> 2011-01-03 12:54:43 =============================================================================== ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.100'] ERROR REPORT <0.350.0> 2011-01-03 12:54:44 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.322.0> 2011-01-03 12:54:44 =============================================================================== ns_1@10.2.1.100:ns_memcached:378: Unable to connect: {error, {badmatch, {error,timeout}}}, retrying. 
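
The steady drumbeat of "Connection attempt from disallowed node 'ns_1@10.2.1.102'" means the third node, no longer in this node's nodes_wanted but still configured with the old cluster cookie, keeps redialing the distribution port. OTP's net_kernel emits exactly this message when a connecting node is outside the allow-list. A sketch with stock kernel calls (the wrapper name is invented):

    %% net_kernel:allow/1 installs a node whitelist; a connection attempt
    %% from any other node is refused and logged as
    %%   ** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
    restrict_cluster(WantedNodes) ->
        ok = net_kernel:allow(WantedNodes).
    %% e.g. restrict_cluster(['ns_1@10.2.1.100','ns_1@10.2.1.101']).
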
ERROR REPORT <0.356.0> 2011-01-03 12:54:45 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** SUPERVISOR REPORT <0.95.0> 2011-01-03 12:54:48 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.349.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:54:48 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.366.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] ERROR REPORT <0.367.0> 2011-01-03 12:54:49 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.364.0> 2011-01-03 12:54:49 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.369.0> 2011-01-03 12:54:49 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.364.0> 2011-01-03 12:54:50 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.322.0> 2011-01-03 12:54:50 =============================================================================== ns_1@10.2.1.100:ns_memcached:378: Unable to connect: {error, {badmatch, {error,timeout}}}, retrying. 
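
hot_keys_keeper dies and is restarted by menelaus_sup every five seconds for the same underlying reason: it calls the registered name 'ns_memcached-default', and while that process is crash-looping the name is unregistered, so the call exits with noproc. The failing call shape, sketched (the helper name is invented):

    %% gen_server:call/3 to a registered name with no live process exits
    %% the caller with {noproc,{gen_server,call,[Name,Request,Timeout]}},
    %% the exact reason in the menelaus_sup SUPERVISOR REPORTs.
    fetch_topkeys(Bucket) ->
        gen_server:call(list_to_atom("ns_memcached-" ++ Bucket),
                        topkeys, 30000).
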
ERROR REPORT <0.375.0> 2011-01-03 12:54:51 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.364.0> 2011-01-03 12:54:51 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] INFO REPORT <0.364.0> 2011-01-03 12:54:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.377.0> 2011-01-03 12:54:52 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** SUPERVISOR REPORT <0.95.0> 2011-01-03 12:54:53 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.366.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:54:53 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.380.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] INFO REPORT <0.364.0> 2011-01-03 12:54:53 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.364.0> 2011-01-03 12:54:53 =============================================================================== ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.100'] ERROR REPORT <0.382.0> 2011-01-03 12:54:54 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.387.0> 2011-01-03 12:54:55 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.322.0> 2011-01-03 12:54:56 =============================================================================== ns_1@10.2.1.100:ns_memcached:378: Unable to connect: {error, {badmatch, {error,timeout}}}, retrying. 
SUPERVISOR REPORT <0.95.0> 2011-01-03 12:54:58 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.380.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:54:58 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.397.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] INFO REPORT <0.395.0> 2011-01-03 12:54:59 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.398.0> 2011-01-03 12:54:59 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.395.0> 2011-01-03 12:55:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.404.0> 2011-01-03 12:55:01 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.395.0> 2011-01-03 12:55:01 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] INFO REPORT <0.395.0> 2011-01-03 12:55:02 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.322.0> 2011-01-03 12:55:02 =============================================================================== ns_1@10.2.1.100:ns_memcached:378: Unable to connect: {error, {badmatch, {error,timeout}}}, retrying. 
ERROR REPORT <0.406.0> 2011-01-03 12:55:02 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** SUPERVISOR REPORT <0.95.0> 2011-01-03 12:55:03 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.397.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:55:03 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.413.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] INFO REPORT <0.395.0> 2011-01-03 12:55:03 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.395.0> 2011-01-03 12:55:03 =============================================================================== ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.100'] ERROR REPORT <0.414.0> 2011-01-03 12:55:04 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.416.0> 2011-01-03 12:55:05 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.63.0> 2011-01-03 12:55:07 =============================================================================== ns_1@10.2.1.100:ns_cluster:90: handling add_node("10.2.1.101", 8091, ..) 
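
From here the log turns to cluster expansion: ns_cluster drives the join over REST, starting the inets application (hence the inets_sup/httpc_sup PROGRESS REPORTs below) and POSTing the local node's info to engage_cluster on 10.2.1.101:8091, followed later by complete_join. A minimal sketch of such a POST with OTP's httpc; the endpoint path is an assumption for illustration:

    %% Sketch, not the shipped ns_cluster code. NodeInfoJson corresponds
    %% to the node-info struct logged below; "/engageCluster" is assumed.
    engage_cluster(Host, Port, NodeInfoJson) ->
        inets:start(),  %% ok | {error,{already_started,inets}}; both fine
        Url = lists:concat(["http://", Host, ":", Port, "/engageCluster"]),
        httpc:request(post, {Url, [], "application/json", NodeInfoJson},
                      [{timeout, 30000}], []).
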
INFO REPORT <0.63.0> 2011-01-03 12:55:07 =============================================================================== ns_1@10.2.1.100:ns_cluster:300: Posting node info to engage_cluster on {"10.2.1.101", 8091}: {struct, [{availableStorage, {struct, [{hdd, [{struct, [{path,<<"C:\\">>}, {sizeKBytes,48162864}, {usagePercent,49}]}, {struct, [{path,<<"D:\\">>}, {sizeKBytes,51279476}, {usagePercent,0}]}, {struct, [{path,<<"G:\\">>}, {sizeKBytes,34724465}, {usagePercent,17}]}]}]}}, {memoryQuota,3268}, {storageTotals, {struct, [{ram, {struct, [{usedByData,0}, {total,4284698624}, {quotaTotal,3426746368}, {used,590569472}]}}, {hdd, {struct, [{usedByData,2041856}, {total,49318772736}, {quotaTotal,49318772736}, {used,24166198640}, {free,25152574096}]}}]}}, {storage, {struct, [{ssd,[]}, {hdd, [{struct, [{path,<<"c:/Program Files/Membase/Server/data/ns_1">>}, {quotaMb,none}, {state,ok}]}]}]}}, {uptime,<<"119">>}, {memoryTotal,4284698624}, {memoryFree,3694129152}, {mcdMemoryReserved,3268}, {mcdMemoryAllocated,3268}, {otpNode,<<"ns_1@10.2.1.100">>}, {otpCookie,<<"pmqchiglstnppkwf">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {hostname,<<"10.2.1.100:8091">>}, {clusterCompatibility,1}, {version,<<"1.6.5r">>}, {os,<<"windows">>}, {ports,{struct,[{proxy,11211},{direct,11210}]}}]} PROGRESS REPORT <0.429.0> 2011-01-03 12:55:07 =============================================================================== supervisor {local,inets_sup} started [{pid,<0.430.0>}, {name,ftp_sup}, {mfa,{ftp_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.432.0> 2011-01-03 12:55:07 =============================================================================== supervisor {local,httpc_profile_sup} started [{pid,<0.433.0>}, {name,httpc_manager}, {mfa,{httpc_manager,start_link,[{default,only_session_cookies}]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,worker}] PROGRESS REPORT <0.431.0> 2011-01-03 12:55:07 =============================================================================== supervisor {local,httpc_sup} started [{pid,<0.432.0>}, {name,httpc_profile_sup}, {mfa,{httpc_profile_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.431.0> 2011-01-03 12:55:07 =============================================================================== supervisor {local,httpc_sup} started [{pid,<0.434.0>}, {name,httpc_handler_sup}, {mfa,{httpc_handler_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.429.0> 2011-01-03 12:55:07 =============================================================================== supervisor {local,inets_sup} started [{pid,<0.431.0>}, {name,httpc_sup}, {mfa,{httpc_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.429.0> 2011-01-03 12:55:07 =============================================================================== supervisor {local,inets_sup} started [{pid,<0.435.0>}, {name,httpd_sup}, {mfa,{httpd_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.429.0> 2011-01-03 12:55:07 =============================================================================== supervisor {local,inets_sup} started [{pid,<0.436.0>}, {name,tftp_sup}, {mfa,{tftp_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, 
{child_type,supervisor}] PROGRESS REPORT <0.7.0> 2011-01-03 12:55:07 =============================================================================== application inets started_at 'ns_1@10.2.1.100' INFO REPORT <0.63.0> 2011-01-03 12:55:07 =============================================================================== ns_1@10.2.1.100:ns_cluster:306: Reply from engage_cluster on {"10.2.1.101", 8091}: {ok,{struct,[{<<"availableStorage">>, {struct,[{<<"hdd">>, [{struct,[{<<"path">>,<<"C:\\">>}, {<<"sizeKBytes">>,46243100}, {<<"usagePercent">>,36}]}, {struct,[{<<"path">>,<<"D:\\">>}, {<<"sizeKBytes">>,51809624}, {<<"usagePercent">>,0}]}, {struct,[{<<"path">>,<<"G:\\">>}, {<<"sizeKBytes">>,33929248}, {<<"usagePercent">>,18}]}]}]}}, {<<"memoryQuota">>,3268}, {<<"storageTotals">>, {struct,[{<<"ram">>, {struct,[{<<"usedByData">>,0}, {<<"total">>,4284698624.0}, {<<"quotaTotal">>,3426746368.0}, {<<"used">>,681070592}]}}, {<<"hdd">>, {struct,[{<<"usedByData">>,0}, {<<"total">>,47352934400.0}, {<<"quotaTotal">>,47352934400.0}, {<<"used">>,17047056384.0}, {<<"free">>,30305878016.0}]}}]}}, {<<"storage">>, {struct,[{<<"ssd">>,[]}, {<<"hdd">>, [{struct,[{<<"path">>, <<"c:/Program Files/Membase/Server/data/ns_1">>}, {<<"quotaMb">>,<<"none">>}, {<<"state">>,<<"ok">>}]}]}]}}, {<<"uptime">>,<<"49">>}, {<<"memoryTotal">>,4284698624.0}, {<<"memoryFree">>,3603628032.0}, {<<"mcdMemoryReserved">>,3268}, {<<"mcdMemoryAllocated">>,3268}, {<<"otpNode">>,<<"ns_1@10.2.1.101">>}, {<<"otpCookie">>,<<"pdofzwfyczwsowjp">>}, {<<"clusterMembership">>,<<"active">>}, {<<"status">>,<<"healthy">>}, {<<"hostname">>,<<"10.2.1.101:8091">>}, {<<"clusterCompatibility">>,1}, {<<"version">>,<<"1.6.5r">>}, {<<"os">>,<<"windows">>}, {<<"ports">>, {struct,[{<<"proxy">>,11211},{<<"direct">>,11210}]}}]}} INFO REPORT <0.63.0> 2011-01-03 12:55:07 =============================================================================== ns_1@10.2.1.100:ns_cluster:371: port_please("ns_1", "10.2.1.101") = 21100 INFO REPORT <0.63.0> 2011-01-03 12:55:07 =============================================================================== ns_1@10.2.1.100:ns_cluster:461: Started node add transaction by adding node 'ns_1@10.2.1.101' to nodes_wanted INFO REPORT <0.65.0> 2011-01-03 12:55:07 =============================================================================== ns_node_disco_conf_events config on nodes_wanted INFO REPORT <0.65.0> 2011-01-03 12:55:07 =============================================================================== config change: nodes_wanted -> ['ns_1@10.2.1.100','ns_1@10.2.1.101'] INFO REPORT <0.438.0> 2011-01-03 12:55:07 =============================================================================== ns_node_disco cookie_sync INFO REPORT <0.438.0> 2011-01-03 12:55:07 =============================================================================== ns_node_disco: nodes_wanted updated: ['ns_1@10.2.1.100','ns_1@10.2.1.101'], with cookie: pmqchiglstnppkwf INFO REPORT <0.65.0> 2011-01-03 12:55:07 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:55:07 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:55:07 =============================================================================== Pushing config done INFO REPORT <0.63.0> 2011-01-03 12:55:07 =============================================================================== ns_1@10.2.1.100:ns_cluster:431: Posting the following 
to complete_join on "10.2.1.101:8091": {struct, [{<<"targetNode">>,'ns_1@10.2.1.101'}, {availableStorage, {struct, [{hdd, [{struct, [{path,<<"C:\\">>}, {sizeKBytes,48162864}, {usagePercent,49}]}, {struct, [{path,<<"D:\\">>}, {sizeKBytes,51279476}, {usagePercent,0}]}, {struct, [{path,<<"G:\\">>}, {sizeKBytes,34724465}, {usagePercent,17}]}]}]}}, {memoryQuota,3268}, {storageTotals, {struct, [{ram, {struct, [{usedByData,0}, {total,4284698624}, {quotaTotal,3426746368}, {used,590569472}]}}, {hdd, {struct, [{usedByData,2041856}, {total,49318772736}, {quotaTotal,49318772736}, {used,24166198640}, {free,25152574096}]}}]}}, {storage, {struct, [{ssd,[]}, {hdd, [{struct, [{path,<<"c:/Program Files/Membase/Server/data/ns_1">>}, {quotaMb,none}, {state,ok}]}]}]}}, {uptime,<<"119">>}, {memoryTotal,4284698624}, {memoryFree,3694129152}, {mcdMemoryReserved,3268}, {mcdMemoryAllocated,3268}, {otpNode,<<"ns_1@10.2.1.100">>}, {otpCookie,<<"pmqchiglstnppkwf">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {hostname,<<"10.2.1.100:8091">>}, {clusterCompatibility,1}, {version,<<"1.6.5r">>}, {os,<<"windows">>}, {ports,{struct,[{proxy,11211},{direct,11210}]}}]} INFO REPORT <0.65.0> 2011-01-03 12:55:07 =============================================================================== config change: {node,'ns_1@10.2.1.101',membership} -> inactiveAdded INFO REPORT <0.65.0> 2011-01-03 12:55:07 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:55:07 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:55:07 =============================================================================== Pushing config done INFO REPORT <0.438.0> 2011-01-03 12:55:07 =============================================================================== ns_node_disco: nodes_wanted pong: ['ns_1@10.2.1.100'], with cookie: pmqchiglstnppkwf INFO REPORT <0.79.0> 2011-01-03 12:55:08 =============================================================================== ns_log: logging ns_node_disco:4:Node 'ns_1@10.2.1.100' saw that node 'ns_1@10.2.1.101' came up. 
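
The join holds together because both nodes converge on the same Erlang cookie (pmqchiglstnppkwf): the otp config change just below applies it, after which cookie_sync reports "nodes_wanted pong" from both nodes. The mechanics are plain OTP (the wrapper is invented):

    %% Distribution only connects nodes presenting the same cookie, so
    %% the cluster cookie is adopted locally and the wanted nodes pinged.
    sync_cookie(Cookie, WantedNodes) ->
        true = erlang:set_cookie(node(), Cookie),
        [{N, net_adm:ping(N)} || N <- WantedNodes].  %% pong | pang each
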
INFO REPORT <0.65.0> 2011-01-03 12:55:08 =============================================================================== ns_node_disco_conf_events config on otp INFO REPORT <0.65.0> 2011-01-03 12:55:08 =============================================================================== config change: otp -> [{cookie,pmqchiglstnppkwf}] INFO REPORT <0.452.0> 2011-01-03 12:55:08 =============================================================================== ns_node_disco cookie_sync INFO REPORT <0.452.0> 2011-01-03 12:55:08 =============================================================================== ns_node_disco: nodes_wanted updated: ['ns_1@10.2.1.100','ns_1@10.2.1.101'], with cookie: pmqchiglstnppkwf INFO REPORT <0.65.0> 2011-01-03 12:55:08 =============================================================================== config change: {node,'ns_1@10.2.1.101',ns_log} -> [{filename,"c:/Program Files/Membase/Server/data/ns_1/ns_log"}] INFO REPORT <0.65.0> 2011-01-03 12:55:08 =============================================================================== config change: {node,'ns_1@10.2.1.101',isasl} -> [{path,"c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}] INFO REPORT <0.65.0> 2011-01-03 12:55:08 =============================================================================== config change: {node,'ns_1@10.2.1.101',memcached} -> [{dbdir,"c:/Program Files/Membase/Server/data/ns_1"}, {port,11210}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"./bin/bucket_engine/bucket_engine.so"}, {engines,[{membase,[{engine,"bin/ep_engine/ep.so"}, {initfile,"priv/init.sql"}]}, {memcached,[{engine,"bin/memcached/default_engine.so"}]}]}, {verbosity,[]}] INFO REPORT <0.65.0> 2011-01-03 12:55:08 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:55:08 =============================================================================== Pushing config INFO REPORT <0.452.0> 2011-01-03 12:55:08 =============================================================================== ns_node_disco: nodes_wanted pong: ['ns_1@10.2.1.100','ns_1@10.2.1.101'], with cookie: pmqchiglstnppkwf INFO REPORT <0.85.0> 2011-01-03 12:55:08 =============================================================================== Pushing config done INFO REPORT <0.93.0> 2011-01-03 12:55:08 =============================================================================== ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,88107,403401}}, {active_buckets,[]}, {memory, [{total,11046880}, {processes,4512340}, {processes_used,4507356}, {system,6534540}, {atom,532781}, {atom_used,515434}, {binary,122152}, {code,4205862}, {ets,299564}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,119}, {memory_data,{4284698624,590569472,{<0.71.0>,486368}}}, {disk_data, [{"C:\\",48162864,49},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,3684864000}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{110393,0}}, {context_switches,{17583,0}}, {garbage_collection,{2949,13052016,0}}, {io,{{input,4967841},{output,981090}}}, {reductions,{3772115,150481}}, {run_queue,0}, {runtime,{624,0}}]}]}] SUPERVISOR REPORT <0.95.0> 2011-01-03 12:55:08 
=============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.413.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:55:08 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.461.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] ERROR REPORT <0.322.0> 2011-01-03 12:55:08 =============================================================================== ns_1@10.2.1.100:ns_memcached:378: Unable to connect: {error, {badmatch, {error,timeout}}}, retrying. INFO REPORT <0.63.0> 2011-01-03 12:55:08 =============================================================================== ns_1@10.2.1.100:ns_cluster:437: Reply from complete_join on "10.2.1.101:8091": {ok,[]} INFO REPORT <0.63.0> 2011-01-03 12:55:08 =============================================================================== ns_1@10.2.1.100:ns_cluster:92: add_node("10.2.1.101", 8091, ..) -> {ok, 'ns_1@10.2.1.101'} INFO REPORT <0.459.0> 2011-01-03 12:55:09 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.467.0> 2011-01-03 12:55:09 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.459.0> 2011-01-03 12:55:10 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.470.0> 2011-01-03 12:55:11 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.459.0> 2011-01-03 12:55:11 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] INFO REPORT <0.459.0> 2011-01-03 12:55:12 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.478.0> 2011-01-03 12:55:12 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.78.0> 2011-01-03 12:55:13 =============================================================================== Detected a new node (from node 'ns_1@10.2.1.100'). Moving config around. 
INFO REPORT <0.78.0> 2011-01-03 12:55:13 =============================================================================== ns_node_disco_log: nodes changed: ['ns_1@10.2.1.100','ns_1@10.2.1.101'] INFO REPORT <0.85.0> 2011-01-03 12:55:13 =============================================================================== Pulling config INFO REPORT <0.85.0> 2011-01-03 12:55:13 =============================================================================== Pulling config from: 'ns_1@10.2.1.101' INFO REPORT <0.85.0> 2011-01-03 12:55:13 =============================================================================== Pulling config done SUPERVISOR REPORT <0.95.0> 2011-01-03 12:55:13 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.461.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:55:13 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.488.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] INFO REPORT <0.459.0> 2011-01-03 12:55:13 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100'] ERROR REPORT <0.459.0> 2011-01-03 12:55:13 =============================================================================== ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.100'] ERROR REPORT <0.489.0> 2011-01-03 12:55:14 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.322.0> 2011-01-03 12:55:14 =============================================================================== ns_1@10.2.1.100:ns_memcached:378: Unable to connect: {error, {badmatch, {error,timeout}}}, retrying. 
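
The buckets config change below carries the vbucket map for "default": 1024 chains, one per vbucket, each of the form [ActiveNode, ReplicaNode]. Every replica slot is still undefined because no replicas have been built yet; building them is what the rebalance that starts next is for. Counting the gaps in such a map (sketch):

    %% Number of vbuckets whose chain still contains an undefined replica.
    missing_replicas(Map) ->
        length([Replicas || [_Active | Replicas] <- Map,
                            lists:member(undefined, Replicas)]).
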
INFO REPORT <0.110.0> 2011-01-03 12:55:15 =============================================================================== ns_log: logging ns_orchestrator:4:Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101'], EjectNodes = [] INFO REPORT <0.65.0> 2011-01-03 12:55:15 =============================================================================== config change: {node,'ns_1@10.2.1.101',membership} -> active INFO REPORT <0.65.0> 2011-01-03 12:55:15 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:55:15 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:55:15 =============================================================================== Pushing config done INFO REPORT <0.65.0> 2011-01-03 12:55:15 =============================================================================== config change: rebalance_status -> running INFO REPORT <0.65.0> 2011-01-03 12:55:15 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:55:15 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:55:15 =============================================================================== Pushing config done INFO REPORT <0.65.0> 2011-01-03 12:55:15 =============================================================================== config change: buckets -> [{configs,[{"default", [{num_replicas,1}, {ram_quota,3426746368}, {auth_type,sasl}, {sasl_password,[]}, {type,membase}, {num_vbuckets,1024}, {ht_size,3079}, {tap_keepalive,0}, {tap_noop_interval,20}, {max_txn_size,1000}, {ht_locks,5}, {servers,['ns_1@10.2.1.100','ns_1@10.2.1.101']}, {map,[['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], 
['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100',undefined], ['ns_1@10.2.1.100'|...], [...]|...]}]}]}] INFO REPORT <0.65.0> 2011-01-03 12:55:15 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:55:15 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:55:15 =============================================================================== Pushing config done ERROR REPORT <0.491.0> 2011-01-03 12:55:15 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.493.0> 2011-01-03 12:55:16 =============================================================================== ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] INFO REPORT <0.493.0> 2011-01-03 12:55:17 =============================================================================== ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] INFO REPORT <0.493.0> 2011-01-03 12:55:18 =============================================================================== ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] INFO REPORT <0.110.0> 2011-01-03 12:55:18 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.493.0>, {dict, 2, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.0]], [['ns_1@10.2.1.101'| 0.0]], [], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} SUPERVISOR REPORT <0.95.0> 2011-01-03 12:55:18 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.488.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:55:18 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.511.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] INFO REPORT <0.493.0> 2011-01-03 12:55:19 =============================================================================== ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] ERROR REPORT <0.515.0> 2011-01-03 12:55:19 =============================================================================== ** Connection attempt from disallowed node 
'ns_1@10.2.1.102' ** INFO REPORT <0.493.0> 2011-01-03 12:55:20 =============================================================================== ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] ERROR REPORT <0.322.0> 2011-01-03 12:55:20 =============================================================================== ns_1@10.2.1.100:ns_memcached:378: Unable to connect: {error, {badmatch, {error,timeout}}}, retrying. ERROR REPORT <0.518.0> 2011-01-03 12:55:21 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.493.0> 2011-01-03 12:55:21 =============================================================================== ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] INFO REPORT <0.110.0> 2011-01-03 12:55:21 =============================================================================== ns_log: logging ns_orchestrator:2:Rebalance exited with reason wait_for_memcached_failed INFO REPORT <0.65.0> 2011-01-03 12:55:21 =============================================================================== config change: rebalance_status -> {none,<<"Rebalance failed. See logs for detailed reason. You can try rebalance again.">>} INFO REPORT <0.65.0> 2011-01-03 12:55:21 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:55:21 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:55:21 =============================================================================== Pushing config done ERROR REPORT <0.527.0> 2011-01-03 12:55:22 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** SUPERVISOR REPORT <0.95.0> 2011-01-03 12:55:23 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.511.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:55:23 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.530.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] ERROR REPORT <0.532.0> 2011-01-03 12:55:24 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.538.0> 2011-01-03 12:55:25 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.322.0> 2011-01-03 12:55:26 =============================================================================== ns_1@10.2.1.100:ns_memcached:378: Unable to connect: {error, {badmatch, {error,timeout}}}, retrying. 
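
The failure above matches the rebalance_status already recorded in the config dump at the top of this diag: ns_rebalancer polls every KeepNode ("Waiting for [...]") for the bucket to come up in memcached, and when the retries run out it exits with wait_for_memcached_failed, which the orchestrator converts into the generic "Rebalance failed. See logs..." status. The polling shape, sketched (bucket_ready/2 is a stand-in predicate):

    %% Sketch of the retry loop implied by the repeated ns_rebalancer:420
    %% lines; numbers and names are illustrative.
    wait_for_memcached(_Nodes, 0) ->
        exit(wait_for_memcached_failed);
    wait_for_memcached(Nodes, TriesLeft) ->
        case [N || N <- Nodes, not bucket_ready(N, "default")] of
            []      -> ok;
            Waiting -> error_logger:info_msg("Waiting for ~p~n", [Waiting]),
                       timer:sleep(1000),
                       wait_for_memcached(Waiting, TriesLeft - 1)
        end.
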
SUPERVISOR REPORT <0.95.0> 2011-01-03 12:55:28 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.530.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:55:28 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.550.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] INFO REPORT <0.548.0> 2011-01-03 12:55:29 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] INFO REPORT <0.85.0> 2011-01-03 12:55:29 =============================================================================== Pulling config from: 'ns_1@10.2.1.101' ERROR REPORT <0.551.0> 2011-01-03 12:55:29 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.548.0> 2011-01-03 12:55:30 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] ERROR REPORT <0.557.0> 2011-01-03 12:55:31 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.548.0> 2011-01-03 12:55:31 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] INFO REPORT <0.548.0> 2011-01-03 12:55:32 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] ERROR REPORT <0.559.0> 2011-01-03 12:55:32 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.322.0> 2011-01-03 12:55:32 =============================================================================== ns_1@10.2.1.100:ns_memcached:378: Unable to connect: {error, {badmatch, {error,timeout}}}, retrying. 
INFO REPORT <0.548.0> 2011-01-03 12:55:33 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] ERROR REPORT <0.548.0> 2011-01-03 12:55:33 =============================================================================== ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] SUPERVISOR REPORT <0.95.0> 2011-01-03 12:55:33 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.550.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:55:33 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.563.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] ERROR REPORT <0.567.0> 2011-01-03 12:55:34 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.569.0> 2011-01-03 12:55:35 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** SUPERVISOR REPORT <0.95.0> 2011-01-03 12:55:38 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.563.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:55:38 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.579.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] ERROR REPORT <0.322.0> 2011-01-03 12:55:38 =============================================================================== ns_1@10.2.1.100:ns_memcached:378: Unable to connect: {error, {badmatch, {error,timeout}}}, retrying. 
INFO REPORT <0.577.0> 2011-01-03 12:55:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] ERROR REPORT <0.581.0> 2011-01-03 12:55:39 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.577.0> 2011-01-03 12:55:40 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] ERROR REPORT <0.586.0> 2011-01-03 12:55:41 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.577.0> 2011-01-03 12:55:41 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] INFO REPORT <0.577.0> 2011-01-03 12:55:42 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] ERROR REPORT <0.589.0> 2011-01-03 12:55:42 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.577.0> 2011-01-03 12:55:43 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] ERROR REPORT <0.577.0> 2011-01-03 12:55:43 =============================================================================== ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] SUPERVISOR REPORT <0.95.0> 2011-01-03 12:55:43 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.579.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:55:43 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.595.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] ERROR REPORT <0.596.0> 2011-01-03 12:55:44 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.322.0> 2011-01-03 12:55:44 =============================================================================== ns_1@10.2.1.100:ns_memcached:378: Unable to connect: {error, {badmatch, {error,timeout}}}, retrying. 
ERROR REPORT <0.598.0> 2011-01-03 12:55:45 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** SUPERVISOR REPORT <0.95.0> 2011-01-03 12:55:48 =============================================================================== Reporting supervisor {local,menelaus_sup} Child process errorContext child_terminated reason {noproc,{gen_server,call,['ns_memcached-default',topkeys,30000]}} pid <0.595.0> name hot_keys_keeper start_function {hot_keys_keeper,start_link,[]} restart_type permanent shutdown 5000 child_type worker PROGRESS REPORT <0.95.0> 2011-01-03 12:55:48 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<0.609.0>}, {name,hot_keys_keeper}, {mfa,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] ERROR REPORT <0.614.0> 2011-01-03 12:55:49 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.607.0> 2011-01-03 12:55:49 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] ERROR REPORT <0.616.0> 2011-01-03 12:55:49 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.607.0> 2011-01-03 12:55:50 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101'] INFO REPORT <0.322.0> 2011-01-03 12:55:51 =============================================================================== ns_log: logging ns_memcached:1:Bucket "default" loaded on node 'ns_1@10.2.1.100' in 1 seconds. 
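
After about a minute and a half of retries, the bucket finally warms up on 10.2.1.100. In the stats_collector dump that follows, the ep-engine watermarks are consistent with fixed fractions of ep_max_data_size, 75% for the high and 60% for the low watermark; that ratio is inferred from the numbers in this dump, not from the source:

    %% Arithmetic check against the stats below:
    %%   watermarks(3426746368) =:= {2570059776, 2056047820}
    %% i.e. ep_mem_high_wat and ep_mem_low_wat as dumped.
    watermarks(MaxDataSize) ->
        {trunc(MaxDataSize * 0.75), trunc(MaxDataSize * 0.60)}.
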
ERROR REPORT <0.618.0> 2011-01-03 12:55:51
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.607.0> 2011-01-03 12:55:51
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.100', 'ns_1@10.2.1.101']

INFO REPORT <0.259.0> 2011-01-03 12:55:51
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
 auth_cmds 0  auth_errors 0  bucket_conns 1  bytes_read 655  bytes_written 13649
 cas_badval 0  cas_hits 0  cas_misses 0  cmd_flush 0  cmd_get 0  cmd_set 0
 conn_yields 0  connection_structures 23  curr_connections 23  curr_items 0
 curr_items_tot 0  daemon_connections 10  decr_hits 0  decr_misses 0
 delete_hits 0  delete_misses 0  ep_bg_fetched 0  ep_commit_num 0
 ep_commit_time 0  ep_commit_time_total 0  ep_data_age 0  ep_data_age_highwat 0
 ep_db_cleaner_status complete  ep_db_strategy multiMTDB  ep_dbinit 81
 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default  ep_dbshards 4
 ep_expired 0  ep_flush_duration 0  ep_flush_duration_highwat 0
 ep_flush_duration_total 0  ep_flush_preempts 0  ep_flusher_state running
 ep_flusher_todo 0  ep_io_num_read 0  ep_io_num_write 0  ep_io_read_bytes 0
 ep_io_write_bytes 0  ep_item_begin_failed 0  ep_item_commit_failed 0
 ep_item_flush_expired 0  ep_item_flush_failed 0  ep_kv_size 0
 ep_max_data_size 3426746368  ep_max_txn_size 1000  ep_mem_high_wat 2570059776
 ep_mem_low_wat 2056047820  ep_min_data_age 0  ep_num_active_non_resident 0
 ep_num_eject_failures 0  ep_num_eject_replicas 0  ep_num_expiry_pager_runs 0
 ep_num_non_resident 0  ep_num_not_my_vbuckets 0  ep_num_pager_runs 0
 ep_num_value_ejects 0  ep_oom_errors 0  ep_overhead 344  ep_pending_ops 0
 ep_pending_ops_max 0  ep_pending_ops_max_duration 0  ep_pending_ops_total 0
 ep_queue_age_cap 900  ep_queue_size 0  ep_storage_age 0
 ep_storage_age_highwat 0  ep_storage_type featured
 ep_store_max_concurrency 10  ep_store_max_readers 9  ep_store_max_readwrite 1
 ep_tap_bg_fetch_requeued 0  ep_tap_bg_fetched 0  ep_tap_keepalive 0
 ep_tmp_oom_errors 0  ep_too_old 0  ep_too_young 0  ep_total_cache_size 0
 ep_total_del_items 0  ep_total_enqueued 0  ep_total_new_items 0
 ep_total_persisted 0  ep_vbucket_del 0  ep_vbucket_del_fail 0
 ep_version 1.6.2  ep_warmed_up 0  ep_warmup true  ep_warmup_dups 0
 ep_warmup_oom 0  ep_warmup_thread complete  ep_warmup_time 31200  get_hits 0
 get_misses 0  incr_hits 0  incr_misses 0  libevent 2.0.7-rc
 limit_maxbytes 67108864  mem_used 344  pid 2160  pointer_size 64
 rejected_conns 0  threads 4  time 1294088151  total_connections 24
 uptime 165  version 1.4.4_304_g7d5a132

INFO REPORT <0.607.0> 2011-01-03 12:55:52
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']

ERROR REPORT <0.624.0> 2011-01-03 12:55:52
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.607.0> 2011-01-03 12:55:53
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']

ERROR REPORT <0.607.0> 2011-01-03 12:55:53
===============================================================================
ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.101']
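Note on the stats report above: ep_mem_high_wat and ep_mem_low_wat are derived values, fixed fractions of ep_max_data_size (75% and 60%, which appear to be the ep-engine defaults of this era). The reported numbers are consistent with that:

    %% Sanity check in an Erlang shell, using ep_max_data_size from the
    %% stats above:
    1> trunc(3426746368 * 0.75).   %% matches ep_mem_high_wat
    2570059776
    2> trunc(3426746368 * 0.60).   %% matches ep_mem_low_wat
    2056047820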
ERROR REPORT <0.628.0> 2011-01-03 12:55:54
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.634.0> 2011-01-03 12:55:55
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.643.0> 2011-01-03 12:55:59
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']

ERROR REPORT <0.646.0> 2011-01-03 12:55:59
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.643.0> 2011-01-03 12:56:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']

ERROR REPORT <0.653.0> 2011-01-03 12:56:01
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.643.0> 2011-01-03 12:56:01
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']

INFO REPORT <0.85.0> 2011-01-03 12:56:02
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.643.0> 2011-01-03 12:56:02
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']

ERROR REPORT <0.655.0> 2011-01-03 12:56:02
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.643.0> 2011-01-03 12:56:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']

ERROR REPORT <0.643.0> 2011-01-03 12:56:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.101']

ERROR REPORT <0.661.0> 2011-01-03 12:56:04
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.666.0> 2011-01-03 12:56:05
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.93.0> 2011-01-03 12:56:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,88167,401401}},
   {active_buckets,["default"]},
   {memory,
    [{total,11754672},
     {processes,4580652},
     {processes_used,4571708},
     {system,7174020},
     {atom,559813},
     {atom_used,556403},
     {binary,308088},
     {code,4562603},
     {ets,349692}]},
   {cluster_compatibility_version,1},
   {version,
    [{os_mon,"2.2.4"},
     {mnesia,"4.4.12"},
     {inets,"5.2"},
     {kernel,"2.13.4"},
     {sasl,"2.1.8"},
     {ns_server,"1.6.5r"},
     {menelaus,"1.6.5r"},
     {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,179},
   {memory_data,{4284698624,603246592,{<0.228.0>,786208}}},
   {disk_data,
    [{"C:\\",48162864,49},{"D:\\",51279476,0},{"G:\\",34724465,17}]},
   {replication,[{"default",1.0}]},
   {system_memory_data,
    [{total_memory,4284698624},
     {free_memory,3523534848},
     {system_total_memory,4284698624}]},
   {statistics,
    [{wall_clock,{170391,0}},
     {context_switches,{30058,0}},
     {garbage_collection,{4691,31811158,0}},
     {io,{{input,5584318},{output,1810806}}},
     {reductions,{7183038,165087}},
     {run_queue,0},
     {runtime,{873,0}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,88168,352400}},
   {active_buckets,[]},
   {memory,
    [{total,12276808},
     {processes,5477644},
     {processes_used,5470076},
     {system,6799164},
     {atom,543029},
     {atom_used,531195},
     {binary,143536},
     {code,4401050},
     {ets,318812}]},
   {cluster_compatibility_version,1},
   {version,
    [{os_mon,"2.2.4"},
     {mnesia,"4.4.12"},
     {inets,"5.2"},
     {kernel,"2.13.4"},
     {sasl,"2.1.8"},
     {ns_server,"1.6.5r"},
     {menelaus,"1.6.5r"},
     {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,110},
   {memory_data,{4284698624,608899072,{<11993.298.0>,1086308}}},
   {disk_data,
    [{"C:\\",46243100,36},{"D:\\",51809624,0},{"G:\\",33929248,18}]},
   {replication,[{"default",1.0}]},
   {system_memory_data,
    [{total_memory,4284698624},
     {free_memory,3662491648},
     {system_total_memory,4284698624}]},
   {statistics,
    [{wall_clock,{100901,0}},
     {context_switches,{23064,0}},
     {garbage_collection,{3948,15357488,0}},
     {io,{{input,5763682},{output,1750007}}},
     {reductions,{4537962,41046}},
     {run_queue,0},
     {runtime,{842,16}}]}]}]
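Note on the ns_doctor dump above: the per-node memory_data and disk_data fields have the shapes produced by OTP's os_mon application, i.e. {TotalOS, Allocated, {WorstPid, WorstPidBytes}} from memsup and [{MountPoint, KBytes, CapacityPercent}] from disksup. A sketch of reading the same values on one node (both calls are standard os_mon API; the results shown are simply the values this dump reports for ns_1@10.2.1.100):

    1> memsup:get_memory_data().
    {4284698624,603246592,{<0.228.0>,786208}}
    2> disksup:get_disk_data().
    [{"C:\\",48162864,49},{"D:\\",51279476,0},{"G:\\",34724465,17}]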
"default" on ['ns_1@10.2.1.101'] ERROR REPORT <0.728.0> 2011-01-03 12:56:19 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.722.0> 2011-01-03 12:56:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101'] ERROR REPORT <0.730.0> 2011-01-03 12:56:21 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.722.0> 2011-01-03 12:56:21 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101'] INFO REPORT <0.722.0> 2011-01-03 12:56:22 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101'] ERROR REPORT <0.735.0> 2011-01-03 12:56:22 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.722.0> 2011-01-03 12:56:23 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101'] ERROR REPORT <0.722.0> 2011-01-03 12:56:23 =============================================================================== ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.101'] ERROR REPORT <0.739.0> 2011-01-03 12:56:24 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.743.0> 2011-01-03 12:56:25 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.753.0> 2011-01-03 12:56:29 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101'] ERROR REPORT <0.758.0> 2011-01-03 12:56:29 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.753.0> 2011-01-03 12:56:30 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101'] ERROR REPORT <0.760.0> 2011-01-03 12:56:31 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.753.0> 2011-01-03 12:56:31 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101'] INFO REPORT <0.753.0> 2011-01-03 12:56:32 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101'] ERROR REPORT <0.766.0> 2011-01-03 12:56:32 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.753.0> 2011-01-03 12:56:33 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101'] ERROR REPORT <0.753.0> 
ERROR REPORT <0.753.0> 2011-01-03 12:56:33
===============================================================================
ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.101']

ERROR REPORT <0.770.0> 2011-01-03 12:56:34
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.776.0> 2011-01-03 12:56:35
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.110.0> 2011-01-03 12:56:37
===============================================================================
ns_log: logging ns_orchestrator:4:Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101'], EjectNodes = []

INFO REPORT <0.72.0> 2011-01-03 12:56:37
===============================================================================
ns_log: suppressing duplicate log ns_orchestrator:4("Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101'], EjectNodes = []\n") because it's been seen 1 times in the past 82.073002 secs (last seen 82.073002 secs ago)

INFO REPORT <0.65.0> 2011-01-03 12:56:37
===============================================================================
config change: rebalance_status -> running

INFO REPORT <0.65.0> 2011-01-03 12:56:37
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 12:56:37
===============================================================================
Pushing config

INFO REPORT <0.85.0> 2011-01-03 12:56:37
===============================================================================
Pushing config done

INFO REPORT <0.784.0> 2011-01-03 12:56:38
===============================================================================
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.101']

INFO REPORT <0.110.0> 2011-01-03 12:56:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.784.0>,
 {dict,2,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.0]],
    [['ns_1@10.2.1.101'|0.0]],
    [],[],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.784.0> 2011-01-03 12:56:39
===============================================================================
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.101']

INFO REPORT <0.784.0> 2011-01-03 12:56:39
===============================================================================
ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 0 in "default" on 'ns_1@10.2.1.100' from missing to active.

INFO REPORT <0.784.0> 2011-01-03 12:56:39
===============================================================================
ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1 in "default" on 'ns_1@10.2.1.100' from missing to active.

INFO REPORT <0.784.0> 2011-01-03 12:56:39
===============================================================================
ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 2 in "default" on 'ns_1@10.2.1.100' from missing to active.

INFO REPORT <0.784.0> 2011-01-03 12:56:39
===============================================================================
ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 3 in "default" on 'ns_1@10.2.1.100' from missing to active.
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 4 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 5 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 6 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 7 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 8 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 9 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 10 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 11 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 12 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 13 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 14 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 15 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 16 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 17 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 18 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 19 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 20 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 21 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 22 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 23 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 24 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 25 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 26 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 27 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 28 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 29 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 30 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 31 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 32 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 33 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 34 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 35 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 36 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 37 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 38 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 39 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 40 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 41 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 42 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 43 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 44 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 45 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 46 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 47 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 48 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 49 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 50 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 51 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 52 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 53 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 54 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 55 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 56 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 57 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 58 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 59 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 60 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 61 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 62 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 63 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 64 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 65 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 66 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 67 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 68 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 69 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 70 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 71 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 72 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 73 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 74 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 75 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 76 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 77 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 78 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 79 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 80 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 81 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 82 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 83 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 84 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 85 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 86 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 87 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 88 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 89 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 90 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 91 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 92 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 93 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 94 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 95 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 96 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 97 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 98 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 99 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 100 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 101 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 102 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 103 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 104 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 105 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 106 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 107 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 108 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 109 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 110 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 111 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 112 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 113 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 114 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 115 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 116 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 117 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 118 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 119 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 120 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 121 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 122 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 123 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 124 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 125 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 126 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 127 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 128 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 129 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 130 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 131 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 132 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 133 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 134 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 135 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 136 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 137 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 138 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 139 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 140 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 141 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 142 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 143 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 144 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 145 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 146 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 147 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 148 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 149 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 150 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 151 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 152 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 153 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39
===============================================================================
ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 154 in "default" on 'ns_1@10.2.1.100' from missing to active.

[... identical ns_janitor:151 INFO REPORTs from <0.784.0>, all timestamped 2011-01-03 12:56:39, repeat once per vbucket for vbuckets 155 through 467, each setting that vbucket in "default" on 'ns_1@10.2.1.100' from missing to active ...]

INFO REPORT <0.784.0> 2011-01-03 12:56:39
===============================================================================
ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 468 in "default" on 'ns_1@10.2.1.100' from missing to active.
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 469 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 470 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 471 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 472 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 473 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 474 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 475 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 476 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 477 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 478 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 479 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 480 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 481 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 482 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 483 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 484 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 485 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 486 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 487 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 488 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 489 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 490 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 491 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 492 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 493 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 494 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 495 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 496 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 497 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 498 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 499 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 500 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 501 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 502 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 503 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 504 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 505 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 506 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 507 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 508 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 509 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 510 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 511 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 512 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 513 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 514 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 515 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 516 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 517 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 518 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 519 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 520 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 521 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 522 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 523 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 524 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 525 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 526 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 527 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 528 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 529 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 530 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 531 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 532 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 533 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 534 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 535 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 536 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 537 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 538 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 539 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 540 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 541 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 542 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 543 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 544 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 545 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 546 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 547 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 548 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 549 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 550 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 551 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 552 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 553 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 554 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 555 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 556 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 557 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 558 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 559 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 560 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 561 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 562 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 563 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 564 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 565 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 566 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 567 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 568 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 569 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 570 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 571 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 572 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 573 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 574 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 575 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 576 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 577 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 578 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 579 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 580 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 581 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 582 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 583 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 584 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 585 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 586 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 587 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 588 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 589 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 590 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 591 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 592 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 593 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 594 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 595 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 596 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 597 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 598 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 599 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 600 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 601 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 602 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 603 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 604 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 605 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 606 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 607 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 608 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 609 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 610 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 611 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 612 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 613 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 614 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 615 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 616 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 617 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 618 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 619 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 620 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 621 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 622 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 623 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 624 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 625 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 626 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 627 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 628 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 629 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 630 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 631 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 632 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 633 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 634 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 635 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 636 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 637 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 638 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 639 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 640 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 641 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 642 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 643 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 644 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 645 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 646 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 647 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 648 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 649 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 650 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 651 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 652 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 653 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 654 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 655 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 656 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 657 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 658 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 659 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 660 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 661 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 662 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 663 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39
===============================================================================
ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 664 in "default" on 'ns_1@10.2.1.100' from missing to active.

[... the same INFO REPORT from <0.784.0> at 2011-01-03 12:56:39 repeats once per vbucket for vbuckets 665 through 977, each logged by ns_1@10.2.1.100:ns_janitor:151 as "Setting vbucket N in "default" on 'ns_1@10.2.1.100' from missing to active." ...]

INFO REPORT <0.784.0> 2011-01-03 12:56:39
===============================================================================
ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 978 in "default" on 'ns_1@10.2.1.100' from missing to active.
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 979 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 980 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 981 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 982 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 983 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 984 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 985 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 986 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 987 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 988 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 989 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 990 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 991 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 992 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 993 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 994 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 995 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 996 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 997 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 998 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 999 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1000 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1001 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1002 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1003 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1004 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1005 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1006 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1007 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1008 in "default" on 'ns_1@10.2.1.100' from missing to active. 
INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1009 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1010 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1011 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1012 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1013 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1014 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1015 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1016 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1017 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1018 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1019 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1020 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1021 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1022 in "default" on 'ns_1@10.2.1.100' from missing to active. INFO REPORT <0.784.0> 2011-01-03 12:56:39 =============================================================================== ns_1@10.2.1.100:ns_janitor:151: Setting vbucket 1023 in "default" on 'ns_1@10.2.1.100' from missing to active. 
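The run of reports above is a single janitor pass: vbuckets 919 through 1023 had no copy on 'ns_1@10.2.1.100', so the janitor walks the bucket's vbucket map, compares the state each node actually reports for every vbucket against the state the map implies, and repairs any mismatch; a vbucket this node should serve but reports as missing is promoted straight to active. A minimal sketch of that reconcile loop follows, with invented names throughout (run/0, fix/3, desired/2 and actual_state/1 are illustrations, not the real ns_server API):

    -module(janitor_sketch).
    -export([run/0]).

    %% Hedged illustration only. Idea: for every vbucket, compare the state
    %% memcached actually reports with the state the bucket map implies for
    %% this node, and repair any mismatch.
    run() ->
        Node = 'ns_1@10.2.1.100',
        Map = [['ns_1@10.2.1.100', 'ns_1@10.2.1.101'],    % vbucket 0: [Master | Replicas]
               ['ns_1@10.2.1.102', 'ns_1@10.2.1.100']],   % vbucket 1
        [fix(Node, VB, Chain) ||
            {VB, Chain} <- lists:zip(lists:seq(0, length(Map) - 1), Map)].

    fix(Node, VB, Chain) ->
        Wanted = desired(Node, Chain),
        case actual_state(VB) of
            Wanted -> ok;                                 % already consistent
            Actual ->
                io:format("Setting vbucket ~b from ~p to ~p~n",
                          [VB, Actual, Wanted]),
                {set_vbucket, VB, Wanted}                 % real code issues a memcached command
        end.

    desired(Node, [Node | _]) -> active;                  % this node is the vbucket's master
    desired(Node, Chain) ->
        case lists:member(Node, Chain) of true -> replica; false -> dead end.

    %% Stub: the real janitor queries each node's memcached for its states.
    actual_state(_VB) -> missing.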
INFO REPORT <0.804.0> 2011-01-03 12:56:39
vbucketmigrator<0.804.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.804.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.804.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.804.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.804.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.804.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.804.0>: Starting to move bucket 0
vbucketmigrator<0.804.0>: Bucket 0 moved to the next server
vbucketmigrator<0.804.0>: Validate bucket states
vbucketmigrator<0.804.0>: 0 ok
INFO REPORT <0.806.0> 2011-01-03 12:56:39
vbucketmigrator<0.806.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.806.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.806.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.806.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.806.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.806.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.806.0>: Starting to move bucket 1
vbucketmigrator<0.806.0>: Bucket 1 moved to the next server
vbucketmigrator<0.806.0>: Validate bucket states
vbucketmigrator<0.806.0>: 1 ok
INFO REPORT <0.808.0> 2011-01-03 12:56:39
vbucketmigrator<0.808.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.808.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.808.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.808.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.808.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.808.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.808.0>: Starting to move bucket 2
vbucketmigrator<0.808.0>: Bucket 2 moved to the next server
vbucketmigrator<0.808.0>: Validate bucket states
vbucketmigrator<0.808.0>: 2 ok
INFO REPORT <0.810.0> 2011-01-03 12:56:39
vbucketmigrator<0.810.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.810.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.810.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.810.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.810.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.810.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.810.0>: Starting to move bucket 3
vbucketmigrator<0.810.0>: Bucket 3 moved to the next server
vbucketmigrator<0.810.0>: Validate bucket states
vbucketmigrator<0.810.0>: 3 ok
INFO REPORT <0.812.0> 2011-01-03 12:56:39
vbucketmigrator<0.812.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.812.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.812.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.812.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.812.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.812.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.812.0>: Starting to move bucket 4
vbucketmigrator<0.812.0>: Bucket 4 moved to the next server
vbucketmigrator<0.812.0>: Validate bucket states
vbucketmigrator<0.812.0>: 4 ok
INFO REPORT <0.814.0> 2011-01-03 12:56:39
vbucketmigrator<0.814.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.814.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.814.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.814.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.814.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.814.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.814.0>: Starting to move bucket 5
vbucketmigrator<0.814.0>: Bucket 5 moved to the next server
vbucketmigrator<0.814.0>: Validate bucket states
vbucketmigrator<0.814.0>: 5 ok
INFO REPORT <0.816.0> 2011-01-03 12:56:39
vbucketmigrator<0.816.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.816.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.816.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.816.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.816.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.816.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.816.0>: Starting to move bucket 6
vbucketmigrator<0.816.0>: Bucket 6 moved to the next server
vbucketmigrator<0.816.0>: Validate bucket states
vbucketmigrator<0.816.0>: 6 ok
INFO REPORT <0.818.0> 2011-01-03 12:56:39
vbucketmigrator<0.818.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.818.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.818.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.818.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.818.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.818.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.818.0>: Starting to move bucket 7
vbucketmigrator<0.818.0>: Bucket 7 moved to the next server
vbucketmigrator<0.818.0>: Validate bucket states
vbucketmigrator<0.818.0>: 7 ok
INFO REPORT <0.820.0> 2011-01-03 12:56:39
vbucketmigrator<0.820.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.820.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.820.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.820.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.820.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.820.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.820.0>: Starting to move bucket 8
vbucketmigrator<0.820.0>: Bucket 8 moved to the next server
vbucketmigrator<0.820.0>: Validate bucket states
vbucketmigrator<0.820.0>: 8 ok
INFO REPORT <0.822.0> 2011-01-03 12:56:39
vbucketmigrator<0.822.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.822.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.822.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.822.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.822.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.822.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.822.0>: Starting to move bucket 9
vbucketmigrator<0.822.0>: Bucket 9 moved to the next server
vbucketmigrator<0.822.0>: Validate bucket states
vbucketmigrator<0.822.0>: 9 ok
INFO REPORT <0.824.0> 2011-01-03 12:56:39
vbucketmigrator<0.824.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.824.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.824.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.824.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.824.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.824.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.824.0>: Starting to move bucket 10
vbucketmigrator<0.824.0>: Bucket 10 moved to the next server
vbucketmigrator<0.824.0>: Validate bucket states
vbucketmigrator<0.824.0>: 10 ok
INFO REPORT <0.826.0> 2011-01-03 12:56:39
vbucketmigrator<0.826.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.826.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.826.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.826.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.826.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.826.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.826.0>: Starting to move bucket 11
vbucketmigrator<0.826.0>: Bucket 11 moved to the next server
vbucketmigrator<0.826.0>: Validate bucket states
vbucketmigrator<0.826.0>: 11 ok
INFO REPORT <0.828.0> 2011-01-03 12:56:39
vbucketmigrator<0.828.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.828.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.828.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.828.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.828.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.828.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.828.0>: Starting to move bucket 12
vbucketmigrator<0.828.0>: Bucket 12 moved to the next server
vbucketmigrator<0.828.0>: Validate bucket states
vbucketmigrator<0.828.0>: 12 ok
INFO REPORT <0.830.0> 2011-01-03 12:56:39
vbucketmigrator<0.830.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.830.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.830.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.830.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.830.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.830.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.830.0>: Starting to move bucket 13
vbucketmigrator<0.830.0>: Bucket 13 moved to the next server
vbucketmigrator<0.830.0>: Validate bucket states
vbucketmigrator<0.830.0>: 13 ok
ERROR REPORT <0.800.0> 2011-01-03 12:56:39
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
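The ERROR REPORT above comes from the Erlang distribution layer rather than from a migrator: the OTP runtime emits exactly this "** Connection attempt from disallowed node ... **" line when the set of connectable nodes has been restricted with net_kernel:allow/1 and a peer outside that set tries to connect. A plausible reading (an assumption; the log does not say so) is that the allowed set was narrowed during the cluster repair, so 'ns_1@10.2.1.102' was turned away. From an Erlang shell the restriction looks like this:

    (ns_1@10.2.1.100)1> net_kernel:allow(['ns_1@10.2.1.100', 'ns_1@10.2.1.101']).
    ok

Any later connection attempt from a node outside that list is refused and logged as above.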
INFO REPORT <0.832.0> 2011-01-03 12:56:39
vbucketmigrator<0.832.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.832.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.832.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.832.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.832.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.832.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.832.0>: Starting to move bucket 14
vbucketmigrator<0.832.0>: Bucket 14 moved to the next server
vbucketmigrator<0.832.0>: Validate bucket states
vbucketmigrator<0.832.0>: 14 ok
INFO REPORT <0.834.0> 2011-01-03 12:56:39
vbucketmigrator<0.834.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.834.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.834.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.834.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.834.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.834.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.834.0>: Starting to move bucket 15
vbucketmigrator<0.834.0>: Bucket 15 moved to the next server
vbucketmigrator<0.834.0>: Validate bucket states
vbucketmigrator<0.834.0>: 15 ok
INFO REPORT <0.836.0> 2011-01-03 12:56:39
vbucketmigrator<0.836.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.836.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.836.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.836.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.836.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.836.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.836.0>: Starting to move bucket 16
vbucketmigrator<0.836.0>: Bucket 16 moved to the next server
vbucketmigrator<0.836.0>: Validate bucket states
vbucketmigrator<0.836.0>: 16 ok
INFO REPORT <0.838.0> 2011-01-03 12:56:39
vbucketmigrator<0.838.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.838.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.838.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.838.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.838.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.838.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.838.0>: Starting to move bucket 17
vbucketmigrator<0.838.0>: Bucket 17 moved to the next server
vbucketmigrator<0.838.0>: Validate bucket states
vbucketmigrator<0.838.0>: 17 ok
INFO REPORT <0.840.0> 2011-01-03 12:56:39
vbucketmigrator<0.840.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.840.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.840.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.840.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.840.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.840.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.840.0>: Starting to move bucket 18
vbucketmigrator<0.840.0>: Bucket 18 moved to the next server
vbucketmigrator<0.840.0>: Validate bucket states
vbucketmigrator<0.840.0>: 18 ok
INFO REPORT <0.842.0> 2011-01-03 12:56:39
vbucketmigrator<0.842.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.842.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.842.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.842.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.842.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.842.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.842.0>: Starting to move bucket 19
vbucketmigrator<0.842.0>: Bucket 19 moved to the next server
vbucketmigrator<0.842.0>: Validate bucket states
vbucketmigrator<0.842.0>: 19 ok
INFO REPORT <0.844.0> 2011-01-03 12:56:39
vbucketmigrator<0.844.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.844.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.844.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.844.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.844.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.844.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.844.0>: Starting to move bucket 20
vbucketmigrator<0.844.0>: Bucket 20 moved to the next server
vbucketmigrator<0.844.0>: Validate bucket states
vbucketmigrator<0.844.0>: 20 ok
INFO REPORT <0.846.0> 2011-01-03 12:56:39
vbucketmigrator<0.846.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.846.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.846.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.846.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.846.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.846.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.846.0>: Starting to move bucket 21
vbucketmigrator<0.846.0>: Bucket 21 moved to the next server
vbucketmigrator<0.846.0>: Validate bucket states
vbucketmigrator<0.846.0>: 21 ok
INFO REPORT <0.848.0> 2011-01-03 12:56:39
vbucketmigrator<0.848.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.848.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.848.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.848.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.848.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.848.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.848.0>: Starting to move bucket 22
vbucketmigrator<0.848.0>: Bucket 22 moved to the next server
vbucketmigrator<0.848.0>: Validate bucket states
vbucketmigrator<0.848.0>: 22 ok
INFO REPORT <0.850.0> 2011-01-03 12:56:39
vbucketmigrator<0.850.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.850.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.850.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.850.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.850.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.850.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.850.0>: Starting to move bucket 23
vbucketmigrator<0.850.0>: Bucket 23 moved to the next server
vbucketmigrator<0.850.0>: Validate bucket states
vbucketmigrator<0.850.0>: 23 ok
INFO REPORT <0.852.0> 2011-01-03 12:56:39
vbucketmigrator<0.852.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.852.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.852.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.852.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.852.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.852.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.852.0>: Starting to move bucket 24
vbucketmigrator<0.852.0>: Bucket 24 moved to the next server
vbucketmigrator<0.852.0>: Validate bucket states
vbucketmigrator<0.852.0>: 24 ok
INFO REPORT <0.854.0> 2011-01-03 12:56:39
vbucketmigrator<0.854.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.854.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.854.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.854.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.854.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.854.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.854.0>: Starting to move bucket 25
vbucketmigrator<0.854.0>: Bucket 25 moved to the next server
vbucketmigrator<0.854.0>: Validate bucket states
vbucketmigrator<0.854.0>: 25 ok
INFO REPORT <0.856.0> 2011-01-03 12:56:39
vbucketmigrator<0.856.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.856.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.856.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.856.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.856.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.856.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.856.0>: Starting to move bucket 26
vbucketmigrator<0.856.0>: Bucket 26 moved to the next server
vbucketmigrator<0.856.0>: Validate bucket states
vbucketmigrator<0.856.0>: 26 ok
INFO REPORT <0.858.0> 2011-01-03 12:56:39
vbucketmigrator<0.858.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.858.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.858.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.858.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.858.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.858.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.858.0>: Starting to move bucket 27
vbucketmigrator<0.858.0>: Bucket 27 moved to the next server
vbucketmigrator<0.858.0>: Validate bucket states
vbucketmigrator<0.858.0>: 27 ok
INFO REPORT <0.860.0> 2011-01-03 12:56:39
vbucketmigrator<0.860.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.860.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.860.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.860.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.860.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.860.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.860.0>: Starting to move bucket 28
vbucketmigrator<0.860.0>: Bucket 28 moved to the next server
vbucketmigrator<0.860.0>: Validate bucket states
vbucketmigrator<0.860.0>: 28 ok
INFO REPORT <0.862.0> 2011-01-03 12:56:40
vbucketmigrator<0.862.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.862.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.862.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.862.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.862.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.862.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.862.0>: Starting to move bucket 29
vbucketmigrator<0.862.0>: Bucket 29 moved to the next server
vbucketmigrator<0.862.0>: Validate bucket states
vbucketmigrator<0.862.0>: 29 ok
INFO REPORT <0.864.0> 2011-01-03 12:56:40
vbucketmigrator<0.864.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.864.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.864.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.864.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.864.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.864.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.864.0>: Starting to move bucket 30
vbucketmigrator<0.864.0>: Bucket 30 moved to the next server
vbucketmigrator<0.864.0>: Validate bucket states
vbucketmigrator<0.864.0>: 30 ok
INFO REPORT <0.866.0> 2011-01-03 12:56:40
vbucketmigrator<0.866.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.866.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.866.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.866.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.866.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.866.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.866.0>: Starting to move bucket 31
vbucketmigrator<0.866.0>: Bucket 31 moved to the next server
vbucketmigrator<0.866.0>: Validate bucket states
vbucketmigrator<0.866.0>: 31 ok
INFO REPORT <0.868.0> 2011-01-03 12:56:40
vbucketmigrator<0.868.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.868.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.868.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.868.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.868.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.868.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.868.0>: Starting to move bucket 32
vbucketmigrator<0.868.0>: Bucket 32 moved to the next server
vbucketmigrator<0.868.0>: Validate bucket states
vbucketmigrator<0.868.0>: 32 ok
INFO REPORT <0.870.0> 2011-01-03 12:56:40
vbucketmigrator<0.870.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.870.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.870.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.870.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.870.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.870.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.870.0>: Starting to move bucket 33
vbucketmigrator<0.870.0>: Bucket 33 moved to the next server
vbucketmigrator<0.870.0>: Validate bucket states
vbucketmigrator<0.870.0>: 33 ok
INFO REPORT <0.872.0> 2011-01-03 12:56:40
vbucketmigrator<0.872.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.872.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.872.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.872.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.872.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.872.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.872.0>: Starting to move bucket 34
vbucketmigrator<0.872.0>: Bucket 34 moved to the next server
vbucketmigrator<0.872.0>: Validate bucket states
vbucketmigrator<0.872.0>: 34 ok
INFO REPORT <0.874.0> 2011-01-03 12:56:40
vbucketmigrator<0.874.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.874.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.874.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.874.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.874.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.874.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.874.0>: Starting to move bucket 35
vbucketmigrator<0.874.0>: Bucket 35 moved to the next server
vbucketmigrator<0.874.0>: Validate bucket states
vbucketmigrator<0.874.0>: 35 ok
INFO REPORT <0.876.0> 2011-01-03 12:56:40
vbucketmigrator<0.876.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.876.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.876.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.876.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.876.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.876.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.876.0>: Starting to move bucket 36
vbucketmigrator<0.876.0>: Bucket 36 moved to the next server
vbucketmigrator<0.876.0>: Validate bucket states
vbucketmigrator<0.876.0>: 36 ok
INFO REPORT <0.85.0> 2011-01-03 12:56:40
Pulling config from: 'ns_1@10.2.1.101'
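The lone "Pulling config from:" report in the middle of the transfers is the configuration replicator at work: each node periodically fetches the replicated config from a peer and merges it into its local copy, with per-key vector clocks deciding the winner when both sides changed a value. A rough sketch of one pull, with invented module and function names (only the log message mirrors the report above):

    -module(config_pull_sketch).
    -export([pull_once/1]).

    %% Hedged illustration: pick a random peer, fetch its config over rpc,
    %% and hand the result back for a vector-clock merge (elided here).
    pull_once(Peers) ->
        Peer = lists:nth(rand:uniform(length(Peers)), Peers),
        error_logger:info_msg("Pulling config from: ~p~n", [Peer]),
        case rpc:call(Peer, ns_config, get, []) of   % assumes the peer exports ns_config:get/0
            {badrpc, Why} -> {error, Why};           % peer unreachable; retry next period
            RemoteConfig  -> {ok, RemoteConfig}      % caller merges; vector clocks win ties
        end.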
INFO REPORT <0.878.0> 2011-01-03 12:56:40
vbucketmigrator<0.878.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.878.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.878.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.878.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.878.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.878.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.878.0>: Starting to move bucket 37
vbucketmigrator<0.878.0>: Bucket 37 moved to the next server
vbucketmigrator<0.878.0>: Validate bucket states
vbucketmigrator<0.878.0>: 37 ok
INFO REPORT <0.880.0> 2011-01-03 12:56:40
vbucketmigrator<0.880.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.880.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.880.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.880.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.880.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.880.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.880.0>: Starting to move bucket 38
vbucketmigrator<0.880.0>: Bucket 38 moved to the next server
vbucketmigrator<0.880.0>: Validate bucket states
vbucketmigrator<0.880.0>: 38 ok
INFO REPORT <0.882.0> 2011-01-03 12:56:40
vbucketmigrator<0.882.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.882.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.882.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.882.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.882.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.882.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.882.0>: Starting to move bucket 39
vbucketmigrator<0.882.0>: Bucket 39 moved to the next server
vbucketmigrator<0.882.0>: Validate bucket states
vbucketmigrator<0.882.0>: 39 ok
INFO REPORT <0.884.0> 2011-01-03 12:56:40
vbucketmigrator<0.884.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.884.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.884.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.884.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.884.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.884.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.884.0>: Starting to move bucket 40
vbucketmigrator<0.884.0>: Bucket 40 moved to the next server
vbucketmigrator<0.884.0>: Validate bucket states
vbucketmigrator<0.884.0>: 40 ok
INFO REPORT <0.886.0> 2011-01-03 12:56:40
vbucketmigrator<0.886.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.886.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.886.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.886.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.886.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.886.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.886.0>: Starting to move bucket 41
vbucketmigrator<0.886.0>: Bucket 41 moved to the next server
vbucketmigrator<0.886.0>: Validate bucket states
vbucketmigrator<0.886.0>: 41 ok
INFO REPORT <0.888.0> 2011-01-03 12:56:40
vbucketmigrator<0.888.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.888.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.888.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.888.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.888.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.888.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.888.0>: Starting to move bucket 42
vbucketmigrator<0.888.0>: Bucket 42 moved to the next server
vbucketmigrator<0.888.0>: Validate bucket states
vbucketmigrator<0.888.0>: 42 ok
INFO REPORT <0.890.0> 2011-01-03 12:56:40
vbucketmigrator<0.890.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.890.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.890.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.890.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.890.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.890.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.890.0>: Starting to move bucket 43
vbucketmigrator<0.890.0>: Bucket 43 moved to the next server
vbucketmigrator<0.890.0>: Validate bucket states
vbucketmigrator<0.890.0>: 43 ok
INFO REPORT <0.892.0> 2011-01-03 12:56:40
vbucketmigrator<0.892.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.892.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.892.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.892.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.892.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.892.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.892.0>: Starting to move bucket 44
vbucketmigrator<0.892.0>: Bucket 44 moved to the next server
vbucketmigrator<0.892.0>: Validate bucket states
vbucketmigrator<0.892.0>: 44 ok
INFO REPORT <0.894.0> 2011-01-03 12:56:40
vbucketmigrator<0.894.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.894.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.894.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.894.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.894.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.894.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.894.0>: Starting to move bucket 45
vbucketmigrator<0.894.0>: Bucket 45 moved to the next server
vbucketmigrator<0.894.0>: Validate bucket states
vbucketmigrator<0.894.0>: 45 ok
INFO REPORT <0.896.0> 2011-01-03 12:56:40
vbucketmigrator<0.896.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.896.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.896.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.896.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.896.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.896.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.896.0>: Starting to move bucket 46
vbucketmigrator<0.896.0>: Bucket 46 moved to the next server
vbucketmigrator<0.896.0>: Validate bucket states
vbucketmigrator<0.896.0>: 46 ok
INFO REPORT <0.898.0> 2011-01-03 12:56:40
vbucketmigrator<0.898.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.898.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.898.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.898.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.898.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.898.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.898.0>: Starting to move bucket 47
vbucketmigrator<0.898.0>: Bucket 47 moved to the next server
vbucketmigrator<0.898.0>: Validate bucket states
vbucketmigrator<0.898.0>: 47 ok
INFO REPORT <0.900.0> 2011-01-03 12:56:40
vbucketmigrator<0.900.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.900.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.900.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.900.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.900.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.900.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.900.0>: Starting to move bucket 48
vbucketmigrator<0.900.0>: Bucket 48 moved to the next server
vbucketmigrator<0.900.0>: Validate bucket states
vbucketmigrator<0.900.0>: 48 ok
INFO REPORT <0.902.0> 2011-01-03 12:56:40
vbucketmigrator<0.902.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.902.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.902.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.902.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.902.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.902.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.902.0>: Starting to move bucket 49
vbucketmigrator<0.902.0>: Bucket 49 moved to the next server
vbucketmigrator<0.902.0>: Validate bucket states
vbucketmigrator<0.902.0>: 49 ok
INFO REPORT <0.904.0> 2011-01-03 12:56:40
vbucketmigrator<0.904.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.904.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.904.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.904.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.904.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.904.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.904.0>: Starting to move bucket 50
vbucketmigrator<0.904.0>: Bucket 50 moved to the next server
vbucketmigrator<0.904.0>: Validate bucket states
vbucketmigrator<0.904.0>: 50 ok
INFO REPORT <0.906.0> 2011-01-03 12:56:40
vbucketmigrator<0.906.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.906.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.906.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.906.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.906.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.906.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.906.0>: Starting to move bucket 51
vbucketmigrator<0.906.0>: Bucket 51 moved to the next server
vbucketmigrator<0.906.0>: Validate bucket states
vbucketmigrator<0.906.0>: 51 ok
INFO REPORT <0.908.0> 2011-01-03 12:56:40
vbucketmigrator<0.908.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.908.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.908.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.908.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.908.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.908.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.908.0>: Starting to move bucket 52
vbucketmigrator<0.908.0>: Bucket 52 moved to the next server
vbucketmigrator<0.908.0>: Validate bucket states
vbucketmigrator<0.908.0>: 52 ok
INFO REPORT <0.910.0> 2011-01-03 12:56:40
vbucketmigrator<0.910.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.910.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.910.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.910.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.910.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.910.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.910.0>: Starting to move bucket 53
vbucketmigrator<0.910.0>: Bucket 53 moved to the next server
vbucketmigrator<0.910.0>: Validate bucket states
vbucketmigrator<0.910.0>: 53 ok
INFO REPORT <0.912.0> 2011-01-03 12:56:40
vbucketmigrator<0.912.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.912.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.912.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.912.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.912.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.912.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.912.0>: Starting to move bucket 54
vbucketmigrator<0.912.0>: Bucket 54 moved to the next server
vbucketmigrator<0.912.0>: Validate bucket states
vbucketmigrator<0.912.0>: 54 ok
INFO REPORT <0.914.0> 2011-01-03 12:56:40
vbucketmigrator<0.914.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.914.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.914.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.914.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.914.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.914.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.914.0>: Starting to move bucket 55
vbucketmigrator<0.914.0>: Bucket 55 moved to the next server
vbucketmigrator<0.914.0>: Validate bucket states
vbucketmigrator<0.914.0>: 55 ok
INFO REPORT <0.916.0> 2011-01-03 12:56:40
vbucketmigrator<0.916.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.916.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.916.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.916.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.916.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.916.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.916.0>: Starting to move bucket 56
vbucketmigrator<0.916.0>: Bucket 56 moved to the next server
vbucketmigrator<0.916.0>: Validate bucket states
vbucketmigrator<0.916.0>: 56 ok
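Each INFO REPORT in this stretch is one vbucketmigrator run: the worker connects to the source (10.2.1.101:11210) and the destination (10.2.1.100:11210), authenticates to both, moves a single vbucket, validates the resulting bucket states, and prints its "N ok" verdict before exiting. ns_server drives each transfer as a separate OS process whose output becomes these log lines. A hedged sketch of such a driver follows; the binary's path and flags are assumptions, not the documented vbucketmigrator interface:

    -module(migrator_sketch).
    -export([migrate_vbucket/3]).

    %% Spawn one external migrator per vbucket and relay its output.
    migrate_vbucket(Src, Dst, VBucket) ->
        Cmd = lists:flatten(
                io_lib:format("./bin/vbucketmigrator/vbucketmigrator"
                              " -h ~s -d ~s -b ~b -v",
                              [Src, Dst, VBucket])),
        Port = open_port({spawn, Cmd}, [exit_status, {line, 1024}]),
        wait(Port).

    %% Forward each line of the child's stdout (the messages seen above)
    %% and turn its exit status into ok | {error, _}.
    wait(Port) ->
        receive
            {Port, {data, {eol, Line}}} ->
                error_logger:info_msg("~s~n", [Line]),
                wait(Port);
            {Port, {data, {noeol, _Partial}}} ->
                wait(Port);
            {Port, {exit_status, 0}} -> ok;
            {Port, {exit_status, N}} -> {error, {exit_status, N}}
        end.

A caller would run something like migrator_sketch:migrate_vbucket("10.2.1.101", "10.2.1.100", 57) once per vbucket being moved, which matches the one-process-per-vbucket pattern of the pids above.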
===============================================================================
vbucketmigrator<0.918.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.918.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.918.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.918.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.918.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.918.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.918.0>: Starting to move bucket 57
vbucketmigrator<0.918.0>: Bucket 57 moved to the next server
vbucketmigrator<0.918.0>: Validate bucket states
vbucketmigrator<0.918.0>: 57 ok
INFO REPORT <0.920.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.920.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.920.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.920.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.920.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.920.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.920.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.920.0>: Starting to move bucket 58
vbucketmigrator<0.920.0>: Bucket 58 moved to the next server
vbucketmigrator<0.920.0>: Validate bucket states
vbucketmigrator<0.920.0>: 58 ok
INFO REPORT <0.922.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.922.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.922.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.922.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.922.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.922.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.922.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.922.0>: Starting to move bucket 59
vbucketmigrator<0.922.0>: Bucket 59 moved to the next server
vbucketmigrator<0.922.0>: Validate bucket states
vbucketmigrator<0.922.0>: 59 ok
INFO REPORT <0.924.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.924.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.924.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.924.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.924.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.924.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.924.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.924.0>: Starting to move bucket 60
vbucketmigrator<0.924.0>: Bucket 60 moved to the next server
vbucketmigrator<0.924.0>: Validate bucket states
vbucketmigrator<0.924.0>: 60 ok
INFO REPORT <0.926.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.926.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.926.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.926.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.926.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.926.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.926.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.926.0>: Starting to move bucket 61
vbucketmigrator<0.926.0>: Bucket 61 moved to the next server
vbucketmigrator<0.926.0>: Validate bucket states
vbucketmigrator<0.926.0>: 61 ok
INFO REPORT <0.928.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.928.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.928.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.928.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.928.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.928.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.928.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.928.0>: Starting to move bucket 62
vbucketmigrator<0.928.0>: Bucket 62 moved to the next server
vbucketmigrator<0.928.0>: Validate bucket states
vbucketmigrator<0.928.0>: 62 ok
INFO REPORT <0.930.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.930.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.930.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.930.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.930.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.930.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.930.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.930.0>: Starting to move bucket 63
vbucketmigrator<0.930.0>: Bucket 63 moved to the next server
vbucketmigrator<0.930.0>: Validate bucket states
vbucketmigrator<0.930.0>: 63 ok
INFO REPORT <0.935.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.935.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.935.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.935.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.935.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.935.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.935.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.935.0>: Starting to move bucket 64
vbucketmigrator<0.935.0>: Bucket 64 moved to the next server
vbucketmigrator<0.935.0>: Validate bucket states
vbucketmigrator<0.935.0>: 64 ok
INFO REPORT <0.937.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.937.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.937.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.937.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.937.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.937.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.937.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.937.0>: Starting to move bucket 65
vbucketmigrator<0.937.0>: Bucket 65 moved to the next server
vbucketmigrator<0.937.0>: Validate bucket states
vbucketmigrator<0.937.0>: 65 ok
INFO REPORT <0.939.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.939.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.939.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.939.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.939.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.939.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.939.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.939.0>: Starting to move bucket 66
vbucketmigrator<0.939.0>: Bucket 66 moved to the next server
vbucketmigrator<0.939.0>: Validate bucket states
vbucketmigrator<0.939.0>: 66 ok
INFO REPORT <0.941.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.941.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.941.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.941.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.941.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.941.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.941.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.941.0>: Starting to move bucket 67
vbucketmigrator<0.941.0>: Bucket 67 moved to the next server
vbucketmigrator<0.941.0>: Validate bucket states
vbucketmigrator<0.941.0>: 67 ok
INFO REPORT <0.943.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.943.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.943.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.943.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.943.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.943.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.943.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.943.0>: Starting to move bucket 68
vbucketmigrator<0.943.0>: Bucket 68 moved to the next server
vbucketmigrator<0.943.0>: Validate bucket states
vbucketmigrator<0.943.0>: 68 ok
INFO REPORT <0.945.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.945.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.945.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.945.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.945.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.945.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.945.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.945.0>: Starting to move bucket 69
vbucketmigrator<0.945.0>: Bucket 69 moved to the next server
vbucketmigrator<0.945.0>: Validate bucket states
vbucketmigrator<0.945.0>: 69 ok
INFO REPORT <0.947.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.947.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.947.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.947.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.947.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.947.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.947.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.947.0>: Starting to move bucket 70
vbucketmigrator<0.947.0>: Bucket 70 moved to the next server
vbucketmigrator<0.947.0>: Validate bucket states
vbucketmigrator<0.947.0>: 70 ok
INFO REPORT <0.949.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.949.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.949.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.949.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.949.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.949.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.949.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.949.0>: Starting to move bucket 71
vbucketmigrator<0.949.0>: Bucket 71 moved to the next server
vbucketmigrator<0.949.0>: Validate bucket states
vbucketmigrator<0.949.0>: 71 ok
INFO REPORT <0.951.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.951.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.951.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.951.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.951.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.951.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.951.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.951.0>: Starting to move bucket 72
vbucketmigrator<0.951.0>: Bucket 72 moved to the next server
vbucketmigrator<0.951.0>: Validate bucket states
vbucketmigrator<0.951.0>: 72 ok
INFO REPORT <0.953.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.953.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.953.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.953.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.953.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.953.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.953.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.953.0>: Starting to move bucket 73
vbucketmigrator<0.953.0>: Bucket 73 moved to the next server
vbucketmigrator<0.953.0>: Validate bucket states
vbucketmigrator<0.953.0>: 73 ok
INFO REPORT <0.955.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.955.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.955.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.955.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.955.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.955.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.955.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.955.0>: Starting to move bucket 74
vbucketmigrator<0.955.0>: Bucket 74 moved to the next server
vbucketmigrator<0.955.0>: Validate bucket states
vbucketmigrator<0.955.0>: 74 ok
INFO REPORT <0.957.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.957.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.957.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.957.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.957.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.957.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.957.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.957.0>: Starting to move bucket 75
vbucketmigrator<0.957.0>: Bucket 75 moved to the next server
vbucketmigrator<0.957.0>: Validate bucket states
vbucketmigrator<0.957.0>: 75 ok
INFO REPORT <0.959.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.959.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.959.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.959.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.959.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.959.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.959.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.959.0>: Starting to move bucket 76
vbucketmigrator<0.959.0>: Bucket 76 moved to the next server
vbucketmigrator<0.959.0>: Validate bucket states
vbucketmigrator<0.959.0>: 76 ok
INFO REPORT <0.961.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.961.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.961.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.961.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.961.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.961.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.961.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.961.0>: Starting to move bucket 77
vbucketmigrator<0.961.0>: Bucket 77 moved to the next server
vbucketmigrator<0.961.0>: Validate bucket states
vbucketmigrator<0.961.0>: 77 ok
INFO REPORT <0.963.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.963.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.963.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.963.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.963.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.963.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.963.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.963.0>: Starting to move bucket 78
vbucketmigrator<0.963.0>: Bucket 78 moved to the next server
vbucketmigrator<0.963.0>: Validate bucket states
vbucketmigrator<0.963.0>: 78 ok
INFO REPORT <0.965.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.965.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.965.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.965.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.965.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.965.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.965.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.965.0>: Starting to move bucket 79
vbucketmigrator<0.965.0>: Bucket 79 moved to the next server
vbucketmigrator<0.965.0>: Validate bucket states
vbucketmigrator<0.965.0>: 79 ok
INFO REPORT <0.967.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.967.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.967.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.967.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.967.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.967.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.967.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.967.0>: Starting to move bucket 80
vbucketmigrator<0.967.0>: Bucket 80 moved to the next server
vbucketmigrator<0.967.0>: Validate bucket states
vbucketmigrator<0.967.0>: 80 ok
INFO REPORT <0.969.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.969.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.969.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.969.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.969.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.969.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.969.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.969.0>: Starting to move bucket 81
vbucketmigrator<0.969.0>: Bucket 81 moved to the next server
vbucketmigrator<0.969.0>: Validate bucket states
vbucketmigrator<0.969.0>: 81 ok
INFO REPORT <0.971.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.971.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.971.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.971.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.971.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.971.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.971.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.971.0>: Starting to move bucket 82
vbucketmigrator<0.971.0>: Bucket 82 moved to the next server
vbucketmigrator<0.971.0>: Validate bucket states
vbucketmigrator<0.971.0>: 82 ok
INFO REPORT <0.973.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.973.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.973.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.973.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.973.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.973.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.973.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.973.0>: Starting to move bucket 83
vbucketmigrator<0.973.0>: Bucket 83 moved to the next server
vbucketmigrator<0.973.0>: Validate bucket states
vbucketmigrator<0.973.0>: 83 ok
INFO REPORT <0.975.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.975.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.975.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.975.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.975.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.975.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.975.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.975.0>: Starting to move bucket 84
vbucketmigrator<0.975.0>: Bucket 84 moved to the next server
vbucketmigrator<0.975.0>: Validate bucket states
vbucketmigrator<0.975.0>: 84 ok
INFO REPORT <0.977.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.977.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.977.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.977.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.977.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.977.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.977.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.977.0>: Starting to move bucket 85
vbucketmigrator<0.977.0>: Bucket 85 moved to the next server
vbucketmigrator<0.977.0>: Validate bucket states
vbucketmigrator<0.977.0>: 85 ok
INFO REPORT <0.979.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.979.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.979.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.979.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.979.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.979.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.979.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.979.0>: Starting to move bucket 86
vbucketmigrator<0.979.0>: Bucket 86 moved to the next server
vbucketmigrator<0.979.0>: Validate bucket states
vbucketmigrator<0.979.0>: 86 ok
INFO REPORT <0.981.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.981.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.981.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.981.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.981.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.981.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.981.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.981.0>: Starting to move bucket 87
vbucketmigrator<0.981.0>: Bucket 87 moved to the next server
vbucketmigrator<0.981.0>: Validate bucket states
vbucketmigrator<0.981.0>: 87 ok
INFO REPORT <0.983.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.983.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.983.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.983.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.983.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.983.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.983.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.983.0>: Starting to move bucket 88
vbucketmigrator<0.983.0>: Bucket 88 moved to the next server
vbucketmigrator<0.983.0>: Validate bucket states
vbucketmigrator<0.983.0>: 88 ok
INFO REPORT <0.985.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.985.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.985.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.985.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.985.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.985.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.985.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.985.0>: Starting to move bucket 89
vbucketmigrator<0.985.0>: Bucket 89 moved to the next server
vbucketmigrator<0.985.0>: Validate bucket states
vbucketmigrator<0.985.0>: 89 ok
INFO REPORT <0.987.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.987.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.987.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.987.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.987.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.987.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.987.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.987.0>: Starting to move bucket 90
vbucketmigrator<0.987.0>: Bucket 90 moved to the next server
vbucketmigrator<0.987.0>: Validate bucket states
vbucketmigrator<0.987.0>: 90 ok
INFO REPORT <0.989.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.989.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.989.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.989.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.989.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.989.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.989.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.989.0>: Starting to move bucket 91
vbucketmigrator<0.989.0>: Bucket 91 moved to the next server
vbucketmigrator<0.989.0>: Validate bucket states
vbucketmigrator<0.989.0>: 91 ok
INFO REPORT <0.991.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.991.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.991.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.991.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.991.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.991.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.991.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.991.0>: Starting to move bucket 92
vbucketmigrator<0.991.0>: Bucket 92 moved to the next server
vbucketmigrator<0.991.0>: Validate bucket states
vbucketmigrator<0.991.0>: 92 ok
INFO REPORT <0.993.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.993.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.993.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.993.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.993.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.993.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.993.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.993.0>: Starting to move bucket 93
vbucketmigrator<0.993.0>: Bucket 93 moved to the next server
vbucketmigrator<0.993.0>: Validate bucket states
vbucketmigrator<0.993.0>: 93 ok
INFO REPORT <0.995.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.995.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.995.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.995.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.995.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.995.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.995.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.995.0>: Starting to move bucket 94
vbucketmigrator<0.995.0>: Bucket 94 moved to the next server
vbucketmigrator<0.995.0>: Validate bucket states
vbucketmigrator<0.995.0>: 94 ok
INFO REPORT <0.997.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.997.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.997.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.997.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.997.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.997.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.997.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.997.0>: Starting to move bucket 95
vbucketmigrator<0.997.0>: Bucket 95 moved to the next server
vbucketmigrator<0.997.0>: Validate bucket states
vbucketmigrator<0.997.0>: 95 ok
INFO REPORT <0.999.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.999.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.999.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.999.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.999.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.999.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.999.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.999.0>: Starting to move bucket 96
vbucketmigrator<0.999.0>: Bucket 96 moved to the next server
vbucketmigrator<0.999.0>: Validate bucket states
vbucketmigrator<0.999.0>: 96 ok
INFO REPORT <0.1001.0> 2011-01-03 12:56:40
===============================================================================
vbucketmigrator<0.1001.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1001.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1001.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1001.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1001.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1001.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1001.0>: Starting to move bucket 97
vbucketmigrator<0.1001.0>: Bucket 97 moved to the next server
vbucketmigrator<0.1001.0>: Validate bucket states
vbucketmigrator<0.1001.0>: 97 ok
INFO REPORT <0.1003.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1003.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1003.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1003.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1003.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1003.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1003.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1003.0>: Starting to move bucket 98
vbucketmigrator<0.1003.0>: Bucket 98 moved to the next server
vbucketmigrator<0.1003.0>: Validate bucket states
vbucketmigrator<0.1003.0>: 98 ok
INFO REPORT <0.1005.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1005.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1005.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1005.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1005.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1005.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1005.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1005.0>: Starting to move bucket 99
vbucketmigrator<0.1005.0>: Bucket 99 moved to the next server
vbucketmigrator<0.1005.0>: Validate bucket states
vbucketmigrator<0.1005.0>: 99 ok
INFO REPORT <0.1007.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1007.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1007.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1007.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1007.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1007.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1007.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1007.0>: Starting to move bucket 100
vbucketmigrator<0.1007.0>: Bucket 100 moved to the next server
vbucketmigrator<0.1007.0>: Validate bucket states
vbucketmigrator<0.1007.0>: 100 ok
INFO REPORT <0.1011.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1011.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1011.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1011.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1011.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1011.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1011.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1011.0>: Starting to move bucket 101
vbucketmigrator<0.1011.0>: Bucket 101 moved to the next server
vbucketmigrator<0.1011.0>: Validate bucket states
vbucketmigrator<0.1011.0>: 101 ok
INFO REPORT <0.1013.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1013.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1013.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1013.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1013.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1013.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1013.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1013.0>: Starting to move bucket 102
vbucketmigrator<0.1013.0>: Bucket 102 moved to the next server
vbucketmigrator<0.1013.0>: Validate bucket states
vbucketmigrator<0.1013.0>: 102 ok
INFO REPORT <0.1015.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1015.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1015.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1015.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1015.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1015.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1015.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1015.0>: Starting to move bucket 103
vbucketmigrator<0.1015.0>: Bucket 103 moved to the next server
vbucketmigrator<0.1015.0>: Validate bucket states
vbucketmigrator<0.1015.0>: 103 ok
INFO REPORT <0.1017.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1017.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1017.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1017.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1017.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1017.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1017.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1017.0>: Starting to move bucket 104
vbucketmigrator<0.1017.0>: Bucket 104 moved to the next server
vbucketmigrator<0.1017.0>: Validate bucket states
vbucketmigrator<0.1017.0>: 104 ok
INFO REPORT <0.1019.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1019.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1019.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1019.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1019.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1019.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1019.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1019.0>: Starting to move bucket 105
vbucketmigrator<0.1019.0>: Bucket 105 moved to the next server
vbucketmigrator<0.1019.0>: Validate bucket states
vbucketmigrator<0.1019.0>: 105 ok
INFO REPORT <0.1021.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1021.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1021.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1021.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1021.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1021.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1021.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1021.0>: Starting to move bucket 106
vbucketmigrator<0.1021.0>: Bucket 106 moved to the next server
vbucketmigrator<0.1021.0>: Validate bucket states
vbucketmigrator<0.1021.0>: 106 ok
INFO REPORT <0.1023.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1023.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1023.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1023.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1023.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1023.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1023.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1023.0>: Starting to move bucket 107
vbucketmigrator<0.1023.0>: Bucket 107 moved to the next server
vbucketmigrator<0.1023.0>: Validate bucket states
vbucketmigrator<0.1023.0>: 107 ok
INFO REPORT <0.1025.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1025.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1025.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1025.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1025.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1025.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1025.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1025.0>: Starting to move bucket 108
vbucketmigrator<0.1025.0>: Bucket 108 moved to the next server
vbucketmigrator<0.1025.0>: Validate bucket states
vbucketmigrator<0.1025.0>: 108 ok
INFO REPORT <0.1027.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1027.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1027.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1027.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1027.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1027.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1027.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1027.0>: Starting to move bucket 109
vbucketmigrator<0.1027.0>: Bucket 109 moved to the next server
vbucketmigrator<0.1027.0>: Validate bucket states
vbucketmigrator<0.1027.0>: 109 ok
INFO REPORT <0.1029.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1029.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1029.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1029.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1029.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1029.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1029.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1029.0>: Starting to move bucket 110
vbucketmigrator<0.1029.0>: Bucket 110 moved to the next server
vbucketmigrator<0.1029.0>: Validate bucket states
vbucketmigrator<0.1029.0>: 110 ok
INFO REPORT <0.1031.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1031.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1031.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1031.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1031.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1031.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1031.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1031.0>: Starting to move bucket 111
vbucketmigrator<0.1031.0>: Bucket 111 moved to the next server
vbucketmigrator<0.1031.0>: Validate bucket states
vbucketmigrator<0.1031.0>: 111 ok
INFO REPORT <0.1033.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1033.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1033.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1033.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1033.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1033.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1033.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1033.0>: Starting to move bucket 112
vbucketmigrator<0.1033.0>: Bucket 112 moved to the next server
vbucketmigrator<0.1033.0>: Validate bucket states
vbucketmigrator<0.1033.0>: 112 ok
INFO REPORT <0.1035.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1035.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1035.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1035.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1035.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1035.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1035.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1035.0>: Starting to move bucket 113
vbucketmigrator<0.1035.0>: Bucket 113 moved to the next server
vbucketmigrator<0.1035.0>: Validate bucket states
vbucketmigrator<0.1035.0>: 113 ok
INFO REPORT <0.1037.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1037.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1037.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1037.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1037.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1037.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1037.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1037.0>: Starting to move bucket 114
vbucketmigrator<0.1037.0>: Bucket 114 moved to the next server
vbucketmigrator<0.1037.0>: Validate bucket states
vbucketmigrator<0.1037.0>: 114 ok
INFO REPORT <0.1039.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1039.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1039.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1039.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1039.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1039.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1039.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1039.0>: Starting to move bucket 115
vbucketmigrator<0.1039.0>: Bucket 115 moved to the next server
vbucketmigrator<0.1039.0>: Validate bucket states
vbucketmigrator<0.1039.0>: 115 ok
ERROR REPORT <0.1008.0> 2011-01-03 12:56:41
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.1041.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1041.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1041.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1041.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1041.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1041.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1041.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1041.0>: Starting to move bucket 116
vbucketmigrator<0.1041.0>: Bucket 116 moved to the next server
vbucketmigrator<0.1041.0>: Validate bucket states
vbucketmigrator<0.1041.0>: 116 ok
INFO REPORT <0.1043.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1043.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1043.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1043.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1043.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1043.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1043.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1043.0>: Starting to move bucket 117
vbucketmigrator<0.1043.0>: Bucket 117 moved to the next server
vbucketmigrator<0.1043.0>: Validate bucket states
vbucketmigrator<0.1043.0>: 117 ok
INFO REPORT <0.1045.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1045.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1045.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1045.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1045.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1045.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1045.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1045.0>: Starting to move bucket 118
vbucketmigrator<0.1045.0>: Bucket 118 moved to the next server
vbucketmigrator<0.1045.0>: Validate bucket states
vbucketmigrator<0.1045.0>: 118 ok
INFO REPORT <0.1047.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1047.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1047.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1047.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1047.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1047.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1047.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1047.0>: Starting to move bucket 119
vbucketmigrator<0.1047.0>: Bucket 119 moved to the next server
vbucketmigrator<0.1047.0>: Validate bucket states
vbucketmigrator<0.1047.0>: 119 ok
INFO REPORT <0.1049.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1049.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1049.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1049.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1049.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1049.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1049.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1049.0>: Starting to move bucket 120
vbucketmigrator<0.1049.0>: Bucket 120 moved to the next server
vbucketmigrator<0.1049.0>: Validate bucket states
vbucketmigrator<0.1049.0>: 120 ok
INFO REPORT <0.1051.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1051.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1051.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1051.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1051.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1051.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1051.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1051.0>: Starting to move bucket 121
vbucketmigrator<0.1051.0>: Bucket 121 moved to the next server
vbucketmigrator<0.1051.0>: Validate bucket states
vbucketmigrator<0.1051.0>: 121 ok
INFO REPORT <0.1053.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1053.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1053.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1053.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1053.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1053.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1053.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1053.0>: Starting to move bucket 122
vbucketmigrator<0.1053.0>: Bucket 122 moved to the next server
vbucketmigrator<0.1053.0>: Validate bucket states
vbucketmigrator<0.1053.0>: 122 ok
INFO REPORT <0.1055.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1055.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1055.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1055.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1055.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1055.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1055.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1055.0>: Starting to move bucket 123
vbucketmigrator<0.1055.0>: Bucket 123 moved to the next server
vbucketmigrator<0.1055.0>: Validate bucket states
vbucketmigrator<0.1055.0>: 123 ok
INFO REPORT <0.1057.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1057.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1057.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1057.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1057.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1057.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1057.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1057.0>: Starting to move bucket 124
vbucketmigrator<0.1057.0>: Bucket 124 moved to the next server
vbucketmigrator<0.1057.0>: Validate bucket states
vbucketmigrator<0.1057.0>: 124 ok
INFO REPORT <0.1059.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1059.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1059.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1059.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1059.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1059.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1059.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1059.0>: Starting to move bucket 125
vbucketmigrator<0.1059.0>: Bucket 125 moved to the next server
vbucketmigrator<0.1059.0>: Validate bucket states
vbucketmigrator<0.1059.0>: 125 ok
INFO REPORT <0.1061.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1061.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1061.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1061.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1061.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1061.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1061.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1061.0>: Starting to move bucket 126
vbucketmigrator<0.1061.0>: Bucket 126 moved to the next server
vbucketmigrator<0.1061.0>: Validate bucket states
vbucketmigrator<0.1061.0>: 126 ok
INFO REPORT <0.1063.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1063.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1063.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1063.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1063.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1063.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1063.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1063.0>: Starting to move bucket 127
vbucketmigrator<0.1063.0>: Bucket 127 moved to the next server
vbucketmigrator<0.1063.0>: Validate bucket states
vbucketmigrator<0.1063.0>: 127 ok
INFO REPORT <0.1065.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1065.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1065.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1065.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1065.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1065.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1065.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1065.0>: Starting to move bucket 128
vbucketmigrator<0.1065.0>: Bucket 128 moved to the next server
vbucketmigrator<0.1065.0>: Validate bucket states
vbucketmigrator<0.1065.0>: 128 ok
INFO REPORT <0.1067.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1067.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1067.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1067.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1067.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1067.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1067.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1067.0>: Starting to move bucket 129
vbucketmigrator<0.1067.0>: Bucket 129 moved to the next server
vbucketmigrator<0.1067.0>: Validate bucket states
vbucketmigrator<0.1067.0>: 129 ok
INFO REPORT <0.1069.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1069.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1069.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1069.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1069.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1069.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1069.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1069.0>: Starting to move bucket 130
vbucketmigrator<0.1069.0>: Bucket 130 moved to the next server
vbucketmigrator<0.1069.0>: Validate bucket states
vbucketmigrator<0.1069.0>: 130 ok
INFO REPORT <0.1071.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1071.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1071.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1071.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1071.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1071.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1071.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1071.0>: Starting to move bucket 131
vbucketmigrator<0.1071.0>: Bucket 131 moved to the next server
vbucketmigrator<0.1071.0>: Validate bucket states
vbucketmigrator<0.1071.0>: 131 ok
INFO REPORT <0.1073.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1073.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1073.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1073.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1073.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1073.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1073.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1073.0>: Starting to move bucket 132
vbucketmigrator<0.1073.0>: Bucket 132 moved to the next server
vbucketmigrator<0.1073.0>: Validate bucket states
vbucketmigrator<0.1073.0>: 132 ok
INFO REPORT <0.1075.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1075.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1075.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1075.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1075.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1075.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1075.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1075.0>: Starting to move bucket 133
vbucketmigrator<0.1075.0>: Bucket 133 moved to the next server
vbucketmigrator<0.1075.0>: Validate bucket states
vbucketmigrator<0.1075.0>: 133 ok
INFO REPORT <0.1077.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1077.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1077.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1077.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1077.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1077.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1077.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1077.0>: Starting to move bucket 134
vbucketmigrator<0.1077.0>: Bucket 134 moved to the next server
vbucketmigrator<0.1077.0>: Validate bucket states
vbucketmigrator<0.1077.0>: 134 ok
INFO REPORT <0.1079.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1079.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1079.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1079.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1079.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1079.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1079.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1079.0>: Starting to move bucket 135
vbucketmigrator<0.1079.0>: Bucket 135 moved to the next server
vbucketmigrator<0.1079.0>: Validate bucket states
vbucketmigrator<0.1079.0>: 135 ok
INFO REPORT <0.1081.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1081.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1081.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1081.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1081.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1081.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1081.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1081.0>: Starting to move bucket 136
vbucketmigrator<0.1081.0>: Bucket 136 moved to the next server
vbucketmigrator<0.1081.0>: Validate bucket states
vbucketmigrator<0.1081.0>: 136 ok
INFO REPORT <0.1083.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1083.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1083.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1083.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1083.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1083.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1083.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1083.0>: Starting to move bucket 137
vbucketmigrator<0.1083.0>: Bucket 137 moved to the next server
vbucketmigrator<0.1083.0>: Validate bucket states
vbucketmigrator<0.1083.0>: 137 ok
INFO REPORT <0.1085.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1085.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1085.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1085.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1085.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1085.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1085.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1085.0>: Starting to move bucket 138
vbucketmigrator<0.1085.0>: Bucket 138 moved to the next server
vbucketmigrator<0.1085.0>: Validate bucket states
vbucketmigrator<0.1085.0>: 138 ok
INFO REPORT <0.1088.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1088.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1088.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1088.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1088.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1088.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1088.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1088.0>: Starting to move bucket 139
vbucketmigrator<0.1088.0>: Bucket 139 moved to the next server
vbucketmigrator<0.1088.0>: Validate bucket states
vbucketmigrator<0.1088.0>: 139 ok
INFO REPORT <0.1090.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1090.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1090.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1090.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1090.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1090.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1090.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1090.0>: Starting to move bucket 140
vbucketmigrator<0.1090.0>: Bucket 140 moved to the next server
vbucketmigrator<0.1090.0>: Validate bucket states
vbucketmigrator<0.1090.0>: 140 ok
INFO REPORT <0.1092.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1092.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1092.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1092.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1092.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1092.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1092.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1092.0>: Starting to move bucket 141
vbucketmigrator<0.1092.0>: Bucket 141 moved to the next server
vbucketmigrator<0.1092.0>: Validate bucket states
vbucketmigrator<0.1092.0>: 141 ok
INFO REPORT <0.1094.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1094.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1094.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1094.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1094.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1094.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1094.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1094.0>: Starting to move bucket 142
vbucketmigrator<0.1094.0>: Bucket 142 moved to the next server
vbucketmigrator<0.1094.0>: Validate bucket states
vbucketmigrator<0.1094.0>: 142 ok
INFO REPORT <0.1096.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1096.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1096.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1096.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1096.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1096.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1096.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1096.0>: Starting to move bucket 143
vbucketmigrator<0.1096.0>: Bucket 143 moved to the next server
vbucketmigrator<0.1096.0>: Validate bucket states
vbucketmigrator<0.1096.0>: 143 ok
INFO REPORT <0.1098.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1098.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1098.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1098.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1098.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1098.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1098.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1098.0>: Starting to move bucket 144
vbucketmigrator<0.1098.0>: Bucket 144 moved to the next server
vbucketmigrator<0.1098.0>: Validate bucket states
vbucketmigrator<0.1098.0>: 144 ok
INFO REPORT <0.1100.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1100.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1100.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1100.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1100.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1100.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1100.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1100.0>: Starting to move bucket 145
vbucketmigrator<0.1100.0>: Bucket 145 moved to the next server
vbucketmigrator<0.1100.0>: Validate bucket states
vbucketmigrator<0.1100.0>: 145 ok
INFO REPORT <0.1102.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1102.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1102.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1102.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1102.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1102.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1102.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1102.0>: Starting to move bucket 146
vbucketmigrator<0.1102.0>: Bucket 146 moved to the next server
vbucketmigrator<0.1102.0>: Validate bucket states
vbucketmigrator<0.1102.0>: 146 ok
INFO REPORT <0.1104.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1104.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1104.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1104.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1104.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1104.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1104.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1104.0>: Starting to move bucket 147
vbucketmigrator<0.1104.0>: Bucket 147 moved to the next server
vbucketmigrator<0.1104.0>: Validate bucket states
vbucketmigrator<0.1104.0>: 147 ok
INFO REPORT <0.1106.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1106.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1106.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1106.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1106.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1106.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1106.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1106.0>: Starting to move bucket 148
vbucketmigrator<0.1106.0>: Bucket 148 moved to the next server
vbucketmigrator<0.1106.0>: Validate bucket states
vbucketmigrator<0.1106.0>: 148 ok
INFO REPORT <0.1108.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1108.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1108.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1108.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1108.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1108.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1108.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1108.0>: Starting to move bucket 149
vbucketmigrator<0.1108.0>: Bucket 149 moved to the next server
vbucketmigrator<0.1108.0>: Validate bucket states
vbucketmigrator<0.1108.0>: 149 ok
INFO REPORT <0.1110.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1110.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1110.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1110.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1110.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1110.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1110.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1110.0>: Starting to move bucket 150
vbucketmigrator<0.1110.0>: Bucket 150 moved to the next server
vbucketmigrator<0.1110.0>: Validate bucket states
vbucketmigrator<0.1110.0>: 150 ok
INFO REPORT <0.1112.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1112.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1112.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1112.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1112.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1112.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1112.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1112.0>: Starting to move bucket 151
vbucketmigrator<0.1112.0>: Bucket 151 moved to the next server
vbucketmigrator<0.1112.0>: Validate bucket states
vbucketmigrator<0.1112.0>: 151 ok
INFO REPORT <0.1114.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1114.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1114.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1114.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1114.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1114.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1114.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1114.0>: Starting to move bucket 152
vbucketmigrator<0.1114.0>: Bucket 152 moved to the next server
vbucketmigrator<0.1114.0>: Validate bucket states
vbucketmigrator<0.1114.0>: 152 ok
INFO REPORT <0.1116.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1116.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1116.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1116.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1116.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1116.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1116.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1116.0>: Starting to move bucket 153
vbucketmigrator<0.1116.0>: Bucket 153 moved to the next server
vbucketmigrator<0.1116.0>: Validate bucket states
vbucketmigrator<0.1116.0>: 153 ok
INFO REPORT <0.1118.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1118.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1118.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1118.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1118.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1118.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1118.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1118.0>: Starting to move bucket 154
vbucketmigrator<0.1118.0>: Bucket 154 moved to the next server
vbucketmigrator<0.1118.0>: Validate bucket states
vbucketmigrator<0.1118.0>: 154 ok
INFO REPORT <0.1120.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1120.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1120.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1120.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1120.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1120.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1120.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1120.0>: Starting to move bucket 155
vbucketmigrator<0.1120.0>: Bucket 155 moved to the next server
vbucketmigrator<0.1120.0>: Validate bucket states
vbucketmigrator<0.1120.0>: 155 ok
INFO REPORT <0.1122.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1122.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1122.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1122.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1122.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1122.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1122.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1122.0>: Starting to move bucket 156
vbucketmigrator<0.1122.0>: Bucket 156 moved to the next server
vbucketmigrator<0.1122.0>: Validate bucket states
vbucketmigrator<0.1122.0>: 156 ok
INFO REPORT <0.1124.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1124.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1124.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1124.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1124.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1124.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1124.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1124.0>: Starting to move bucket 157
vbucketmigrator<0.1124.0>: Bucket 157 moved to the next server
vbucketmigrator<0.1124.0>: Validate bucket states
vbucketmigrator<0.1124.0>: 157 ok
INFO REPORT <0.1126.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1126.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1126.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1126.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1126.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1126.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1126.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1126.0>: Starting to move bucket 158
vbucketmigrator<0.1126.0>: Bucket 158 moved to the next server
vbucketmigrator<0.1126.0>: Validate bucket states
vbucketmigrator<0.1126.0>: 158 ok
INFO REPORT <0.1128.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1128.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1128.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1128.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1128.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1128.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1128.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1128.0>: Starting to move bucket 159
vbucketmigrator<0.1128.0>: Bucket 159 moved to the next server
vbucketmigrator<0.1128.0>: Validate bucket states
vbucketmigrator<0.1128.0>: 159 ok
INFO REPORT <0.1130.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1130.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1130.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1130.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1130.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1130.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1130.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1130.0>: Starting to move bucket 160
vbucketmigrator<0.1130.0>: Bucket 160 moved to the next server
vbucketmigrator<0.1130.0>: Validate bucket states
vbucketmigrator<0.1130.0>: 160 ok
INFO REPORT <0.1132.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1132.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1132.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1132.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1132.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1132.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1132.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1132.0>: Starting to move bucket 161
vbucketmigrator<0.1132.0>: Bucket 161 moved to the next server
vbucketmigrator<0.1132.0>: Validate bucket states
vbucketmigrator<0.1132.0>: 161 ok
INFO REPORT <0.1134.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1134.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1134.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1134.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1134.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1134.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1134.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1134.0>: Starting to move bucket 162
vbucketmigrator<0.1134.0>: Bucket 162 moved to the next server
vbucketmigrator<0.1134.0>: Validate bucket states
vbucketmigrator<0.1134.0>: 162 ok
INFO REPORT <0.1136.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1136.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1136.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1136.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1136.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1136.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1136.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1136.0>: Starting to move bucket 163
vbucketmigrator<0.1136.0>: Bucket 163 moved to the next server
vbucketmigrator<0.1136.0>: Validate bucket states
vbucketmigrator<0.1136.0>: 163 ok
INFO REPORT <0.1138.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1138.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1138.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1138.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1138.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1138.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1138.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1138.0>: Starting to move bucket 164
vbucketmigrator<0.1138.0>: Bucket 164 moved to the next server
vbucketmigrator<0.1138.0>: Validate bucket states
vbucketmigrator<0.1138.0>: 164 ok
INFO REPORT <0.1140.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1140.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1140.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1140.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1140.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1140.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1140.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1140.0>: Starting to move bucket 165
vbucketmigrator<0.1140.0>: Bucket 165 moved to the next server
vbucketmigrator<0.1140.0>: Validate bucket states
vbucketmigrator<0.1140.0>: 165 ok
INFO REPORT <0.1142.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1142.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1142.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1142.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1142.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1142.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1142.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1142.0>: Starting to move bucket 166
vbucketmigrator<0.1142.0>: Bucket 166 moved to the next server
vbucketmigrator<0.1142.0>: Validate bucket states
vbucketmigrator<0.1142.0>: 166 ok
INFO REPORT <0.1144.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1144.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1144.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1144.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1144.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1144.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1144.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1144.0>: Starting to move bucket 167
vbucketmigrator<0.1144.0>: Bucket 167 moved to the next server
vbucketmigrator<0.1144.0>: Validate bucket states
vbucketmigrator<0.1144.0>: 167 ok
INFO REPORT <0.1146.0> 2011-01-03 12:56:41
===============================================================================
vbucketmigrator<0.1146.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1146.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1146.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1146.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1146.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1146.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1146.0>: Starting to move bucket 168
vbucketmigrator<0.1146.0>: Bucket 168 moved to the next server
vbucketmigrator<0.1146.0>: Validate bucket states
vbucketmigrator<0.1146.0>: 168 ok
INFO REPORT <0.1148.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1148.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1148.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1148.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1148.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1148.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1148.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1148.0>: Starting to move bucket 169
vbucketmigrator<0.1148.0>: Bucket 169 moved to the next server
vbucketmigrator<0.1148.0>: Validate bucket states
vbucketmigrator<0.1148.0>: 169 ok
INFO REPORT <0.1150.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1150.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1150.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1150.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1150.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1150.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1150.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1150.0>: Starting to move bucket 170
vbucketmigrator<0.1150.0>: Bucket 170 moved to the next server
vbucketmigrator<0.1150.0>: Validate bucket states
vbucketmigrator<0.1150.0>: 170 ok
INFO REPORT <0.1152.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1152.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1152.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1152.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1152.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1152.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1152.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1152.0>: Starting to move bucket 171
vbucketmigrator<0.1152.0>: Bucket 171 moved to the next server
vbucketmigrator<0.1152.0>: Validate bucket states
vbucketmigrator<0.1152.0>: 171 ok
INFO REPORT <0.1154.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1154.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1154.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1154.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1154.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1154.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1154.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1154.0>: Starting to move bucket 172
vbucketmigrator<0.1154.0>: Bucket 172 moved to the next server
vbucketmigrator<0.1154.0>: Validate bucket states
vbucketmigrator<0.1154.0>: 172 ok
INFO REPORT <0.1156.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1156.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1156.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1156.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1156.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1156.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1156.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1156.0>: Starting to move bucket 173
vbucketmigrator<0.1156.0>: Bucket 173 moved to the next server
vbucketmigrator<0.1156.0>: Validate bucket states
vbucketmigrator<0.1156.0>: 173 ok
INFO REPORT <0.1158.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1158.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1158.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1158.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1158.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1158.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1158.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1158.0>: Starting to move bucket 174
vbucketmigrator<0.1158.0>: Bucket 174 moved to the next server
vbucketmigrator<0.1158.0>: Validate bucket states
vbucketmigrator<0.1158.0>: 174 ok
INFO REPORT <0.1160.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1160.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1160.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1160.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1160.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1160.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1160.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1160.0>: Starting to move bucket 175
vbucketmigrator<0.1160.0>: Bucket 175 moved to the next server
vbucketmigrator<0.1160.0>: Validate bucket states
vbucketmigrator<0.1160.0>: 175 ok
INFO REPORT <0.1162.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1162.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1162.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1162.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1162.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1162.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1162.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1162.0>: Starting to move bucket 176
vbucketmigrator<0.1162.0>: Bucket 176 moved to the next server
vbucketmigrator<0.1162.0>: Validate bucket states
vbucketmigrator<0.1162.0>: 176 ok
INFO REPORT <0.1164.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1164.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1164.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1164.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1164.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1164.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1164.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1164.0>: Starting to move bucket 177
vbucketmigrator<0.1164.0>: Bucket 177 moved to the next server
vbucketmigrator<0.1164.0>: Validate bucket states
vbucketmigrator<0.1164.0>: 177 ok
INFO REPORT <0.1166.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1166.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1166.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1166.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1166.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1166.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1166.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1166.0>: Starting to move bucket 178
vbucketmigrator<0.1166.0>: Bucket 178 moved to the next server
vbucketmigrator<0.1166.0>: Validate bucket states
vbucketmigrator<0.1166.0>: 178 ok
INFO REPORT <0.1168.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1168.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1168.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1168.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1168.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1168.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1168.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1168.0>: Starting to move bucket 179
vbucketmigrator<0.1168.0>: Bucket 179 moved to the next server
vbucketmigrator<0.1168.0>: Validate bucket states
vbucketmigrator<0.1168.0>: 179 ok
INFO REPORT <0.1170.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1170.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1170.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1170.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1170.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1170.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1170.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1170.0>: Starting to move bucket 180
vbucketmigrator<0.1170.0>: Bucket 180 moved to the next server
vbucketmigrator<0.1170.0>: Validate bucket states
vbucketmigrator<0.1170.0>: 180 ok
INFO REPORT <0.1172.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1172.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1172.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1172.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1172.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1172.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1172.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1172.0>: Starting to move bucket 181
vbucketmigrator<0.1172.0>: Bucket 181 moved to the next server
vbucketmigrator<0.1172.0>: Validate bucket states
vbucketmigrator<0.1172.0>: 181 ok
INFO REPORT <0.1174.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1174.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1174.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1174.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1174.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1174.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1174.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1174.0>: Starting to move bucket 182
vbucketmigrator<0.1174.0>: Bucket 182 moved to the next server
vbucketmigrator<0.1174.0>: Validate bucket states
vbucketmigrator<0.1174.0>: 182 ok
INFO REPORT <0.1176.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1176.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1176.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1176.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1176.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1176.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1176.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1176.0>: Starting to move bucket 183
vbucketmigrator<0.1176.0>: Bucket 183 moved to the next server
vbucketmigrator<0.1176.0>: Validate bucket states
vbucketmigrator<0.1176.0>: 183 ok
INFO REPORT <0.1178.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1178.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1178.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1178.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1178.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1178.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1178.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1178.0>: Starting to move bucket 184
vbucketmigrator<0.1178.0>: Bucket 184 moved to the next server
vbucketmigrator<0.1178.0>: Validate bucket states
vbucketmigrator<0.1178.0>: 184 ok
INFO REPORT <0.1180.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1180.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1180.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1180.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1180.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1180.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1180.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1180.0>: Starting to move bucket 185
vbucketmigrator<0.1180.0>: Bucket 185 moved to the next server
vbucketmigrator<0.1180.0>: Validate bucket states
vbucketmigrator<0.1180.0>: 185 ok
INFO REPORT <0.1182.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1182.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1182.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1182.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1182.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1182.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1182.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1182.0>: Starting to move bucket 186
vbucketmigrator<0.1182.0>: Bucket 186 moved to the next server
vbucketmigrator<0.1182.0>: Validate bucket states
vbucketmigrator<0.1182.0>: 186 ok
INFO REPORT <0.1184.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1184.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1184.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1184.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1184.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1184.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1184.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1184.0>: Starting to move bucket 187
vbucketmigrator<0.1184.0>: Bucket 187 moved to the next server
vbucketmigrator<0.1184.0>: Validate bucket states
vbucketmigrator<0.1184.0>: 187 ok
INFO REPORT <0.1186.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1186.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1186.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1186.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1186.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1186.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1186.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1186.0>: Starting to move bucket 188
vbucketmigrator<0.1186.0>: Bucket 188 moved to the next server
vbucketmigrator<0.1186.0>: Validate bucket states
vbucketmigrator<0.1186.0>: 188 ok
INFO REPORT <0.1188.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1188.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1188.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1188.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1188.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1188.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1188.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1188.0>: Starting to move bucket 189
vbucketmigrator<0.1188.0>: Bucket 189 moved to the next server
vbucketmigrator<0.1188.0>: Validate bucket states
vbucketmigrator<0.1188.0>: 189 ok
INFO REPORT <0.1190.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1190.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1190.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1190.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1190.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1190.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1190.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1190.0>: Starting to move bucket 190
vbucketmigrator<0.1190.0>: Bucket 190 moved to the next server
vbucketmigrator<0.1190.0>: Validate bucket states
vbucketmigrator<0.1190.0>: 190 ok
INFO REPORT <0.1192.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1192.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1192.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1192.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1192.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1192.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1192.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1192.0>: Starting to move bucket 191
vbucketmigrator<0.1192.0>: Bucket 191 moved to the next server
vbucketmigrator<0.1192.0>: Validate bucket states
vbucketmigrator<0.1192.0>: 191 ok
INFO REPORT <0.1194.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1194.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1194.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1194.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1194.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1194.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1194.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1194.0>: Starting to move bucket 192
vbucketmigrator<0.1194.0>: Bucket 192 moved to the next server
vbucketmigrator<0.1194.0>: Validate bucket states
vbucketmigrator<0.1194.0>: 192 ok
INFO REPORT <0.1196.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1196.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1196.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1196.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1196.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1196.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1196.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1196.0>: Starting to move bucket 193
vbucketmigrator<0.1196.0>: Bucket 193 moved to the next server
vbucketmigrator<0.1196.0>: Validate bucket states
vbucketmigrator<0.1196.0>: 193 ok
INFO REPORT <0.1198.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1198.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1198.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1198.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1198.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1198.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1198.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1198.0>: Starting to move bucket 194
vbucketmigrator<0.1198.0>: Bucket 194 moved to the next server
vbucketmigrator<0.1198.0>: Validate bucket states
vbucketmigrator<0.1198.0>: 194 ok
INFO REPORT <0.1200.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1200.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1200.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1200.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1200.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1200.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1200.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1200.0>: Starting to move bucket 195
vbucketmigrator<0.1200.0>: Bucket 195 moved to the next server
vbucketmigrator<0.1200.0>: Validate bucket states
vbucketmigrator<0.1200.0>: 195 ok
INFO REPORT <0.1202.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1202.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1202.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1202.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1202.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1202.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1202.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1202.0>: Starting to move bucket 196
vbucketmigrator<0.1202.0>: Bucket 196 moved to the next server
vbucketmigrator<0.1202.0>: Validate bucket states
vbucketmigrator<0.1202.0>: 196 ok
INFO REPORT <0.1204.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1204.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1204.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1204.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1204.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1204.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1204.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1204.0>: Starting to move bucket 197
vbucketmigrator<0.1204.0>: Bucket 197 moved to the next server
vbucketmigrator<0.1204.0>: Validate bucket states
vbucketmigrator<0.1204.0>: 197 ok
INFO REPORT <0.1206.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1206.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1206.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1206.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1206.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1206.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1206.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1206.0>: Starting to move bucket 198
vbucketmigrator<0.1206.0>: Bucket 198 moved to the next server
vbucketmigrator<0.1206.0>: Validate bucket states
vbucketmigrator<0.1206.0>: 198 ok
INFO REPORT <0.1208.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1208.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1208.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1208.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1208.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1208.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1208.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1208.0>: Starting to move bucket 199
vbucketmigrator<0.1208.0>: Bucket 199 moved to the next server
vbucketmigrator<0.1208.0>: Validate bucket states
vbucketmigrator<0.1208.0>: 199 ok
INFO REPORT <0.1210.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1210.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1210.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1210.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1210.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1210.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1210.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1210.0>: Starting to move bucket 200
vbucketmigrator<0.1210.0>: Bucket 200 moved to the next server
vbucketmigrator<0.1210.0>: Validate bucket states
vbucketmigrator<0.1210.0>: 200 ok
INFO REPORT <0.1212.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1212.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1212.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1212.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1212.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1212.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1212.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1212.0>: Starting to move bucket 201
vbucketmigrator<0.1212.0>: Bucket 201 moved to the next server
vbucketmigrator<0.1212.0>: Validate bucket states
vbucketmigrator<0.1212.0>: 201 ok
INFO REPORT <0.1216.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1216.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1216.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1216.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1216.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1216.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1216.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1216.0>: Starting to move bucket 202
vbucketmigrator<0.1216.0>: Bucket 202 moved to the next server
vbucketmigrator<0.1216.0>: Validate bucket states
vbucketmigrator<0.1216.0>: 202 ok
INFO REPORT <0.1218.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1218.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1218.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1218.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1218.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1218.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1218.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1218.0>: Starting to move bucket 203
vbucketmigrator<0.1218.0>: Bucket 203 moved to the next server
vbucketmigrator<0.1218.0>: Validate bucket states
vbucketmigrator<0.1218.0>: 203 ok
INFO REPORT <0.1220.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1220.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1220.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1220.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1220.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1220.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1220.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1220.0>: Starting to move bucket 204
vbucketmigrator<0.1220.0>: Bucket 204 moved to the next server
vbucketmigrator<0.1220.0>: Validate bucket states
vbucketmigrator<0.1220.0>: 204 ok
INFO REPORT <0.1222.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1222.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1222.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1222.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1222.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1222.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1222.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1222.0>: Starting to move bucket 205
vbucketmigrator<0.1222.0>: Bucket 205 moved to the next server
vbucketmigrator<0.1222.0>: Validate bucket states
vbucketmigrator<0.1222.0>: 205 ok
INFO REPORT <0.1224.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1224.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1224.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1224.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1224.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1224.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1224.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1224.0>: Starting to move bucket 206
vbucketmigrator<0.1224.0>: Bucket 206 moved to the next server
vbucketmigrator<0.1224.0>: Validate bucket states
vbucketmigrator<0.1224.0>: 206 ok
INFO REPORT <0.1226.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1226.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1226.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1226.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1226.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1226.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1226.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1226.0>: Starting to move bucket 207
vbucketmigrator<0.1226.0>: Bucket 207 moved to the next server
vbucketmigrator<0.1226.0>: Validate bucket states
vbucketmigrator<0.1226.0>: 207 ok
INFO REPORT <0.1228.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1228.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1228.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1228.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1228.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1228.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1228.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1228.0>: Starting to move bucket 208
vbucketmigrator<0.1228.0>: Bucket 208 moved to the next server
vbucketmigrator<0.1228.0>: Validate bucket states
vbucketmigrator<0.1228.0>: 208 ok
INFO REPORT <0.1230.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1230.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1230.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1230.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1230.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1230.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1230.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1230.0>: Starting to move bucket 209
vbucketmigrator<0.1230.0>: Bucket 209 moved to the next server
vbucketmigrator<0.1230.0>: Validate bucket states
vbucketmigrator<0.1230.0>: 209 ok
INFO REPORT <0.1232.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1232.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1232.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1232.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1232.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1232.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1232.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1232.0>: Starting to move bucket 210
vbucketmigrator<0.1232.0>: Bucket 210 moved to the next server
vbucketmigrator<0.1232.0>: Validate bucket states
vbucketmigrator<0.1232.0>: 210 ok
INFO REPORT <0.1234.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1234.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1234.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1234.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1234.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1234.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1234.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1234.0>: Starting to move bucket 211
vbucketmigrator<0.1234.0>: Bucket 211 moved to the next server
vbucketmigrator<0.1234.0>: Validate bucket states
vbucketmigrator<0.1234.0>: 211 ok
INFO REPORT <0.1236.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1236.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1236.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1236.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1236.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1236.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1236.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1236.0>: Starting to move bucket 212
vbucketmigrator<0.1236.0>: Bucket 212 moved to the next server
vbucketmigrator<0.1236.0>: Validate bucket states
vbucketmigrator<0.1236.0>: 212 ok
INFO REPORT <0.1238.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1238.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1238.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1238.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1238.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1238.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1238.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1238.0>: Starting to move bucket 213
vbucketmigrator<0.1238.0>: Bucket 213 moved to the next server
vbucketmigrator<0.1238.0>: Validate bucket states
vbucketmigrator<0.1238.0>: 213 ok
INFO REPORT <0.1240.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1240.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1240.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1240.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1240.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1240.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1240.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1240.0>: Starting to move bucket 214
vbucketmigrator<0.1240.0>: Bucket 214 moved to the next server
vbucketmigrator<0.1240.0>: Validate bucket states
vbucketmigrator<0.1240.0>: 214 ok
INFO REPORT <0.1242.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1242.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1242.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1242.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1242.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1242.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1242.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1242.0>: Starting to move bucket 215
vbucketmigrator<0.1242.0>: Bucket 215 moved to the next server
vbucketmigrator<0.1242.0>: Validate bucket states
vbucketmigrator<0.1242.0>: 215 ok
ERROR REPORT <0.1214.0> 2011-01-03 12:56:42
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.1244.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1244.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1244.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1244.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1244.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1244.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1244.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1244.0>: Starting to move bucket 216
vbucketmigrator<0.1244.0>: Bucket 216 moved to the next server
vbucketmigrator<0.1244.0>: Validate bucket states
vbucketmigrator<0.1244.0>: 216 ok
INFO REPORT <0.1246.0> 2011-01-03 12:56:42
===============================================================================
vbucketmigrator<0.1246.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1246.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1246.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1246.0>: Connecting to {Sock 10.2.1.100:11210}
Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1246.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1246.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1246.0>: Starting to move bucket 217 vbucketmigrator<0.1246.0>: Bucket 217 moved to the next server vbucketmigrator<0.1246.0>: Validate bucket states vbucketmigrator<0.1246.0>: 217 ok INFO REPORT <0.1248.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1248.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1248.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1248.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1248.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1248.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1248.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1248.0>: Starting to move bucket 218 vbucketmigrator<0.1248.0>: Bucket 218 moved to the next server vbucketmigrator<0.1248.0>: Validate bucket states vbucketmigrator<0.1248.0>: 218 ok INFO REPORT <0.1250.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1250.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1250.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1250.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1250.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1250.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1250.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1250.0>: Starting to move bucket 219 vbucketmigrator<0.1250.0>: Bucket 219 moved to the next server vbucketmigrator<0.1250.0>: Validate bucket states vbucketmigrator<0.1250.0>: 219 ok INFO REPORT <0.1252.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1252.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1252.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1252.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1252.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1252.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1252.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1252.0>: Starting to move bucket 220 vbucketmigrator<0.1252.0>: Bucket 220 moved to the next server vbucketmigrator<0.1252.0>: Validate bucket states vbucketmigrator<0.1252.0>: 220 ok INFO REPORT <0.1254.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1254.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1254.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1254.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1254.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1254.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1254.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1254.0>: Starting to move bucket 221 vbucketmigrator<0.1254.0>: Bucket 221 moved to the next server vbucketmigrator<0.1254.0>: Validate bucket states vbucketmigrator<0.1254.0>: 221 ok INFO REPORT <0.1256.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1256.0>: 
Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1256.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1256.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1256.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1256.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1256.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1256.0>: Starting to move bucket 222 vbucketmigrator<0.1256.0>: Bucket 222 moved to the next server vbucketmigrator<0.1256.0>: Validate bucket states vbucketmigrator<0.1256.0>: 222 ok INFO REPORT <0.1258.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1258.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1258.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1258.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1258.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1258.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1258.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1258.0>: Starting to move bucket 223 vbucketmigrator<0.1258.0>: Bucket 223 moved to the next server vbucketmigrator<0.1258.0>: Validate bucket states vbucketmigrator<0.1258.0>: 223 ok INFO REPORT <0.1260.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1260.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1260.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1260.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1260.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1260.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1260.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1260.0>: Starting to move bucket 224 vbucketmigrator<0.1260.0>: Bucket 224 moved to the next server vbucketmigrator<0.1260.0>: Validate bucket states vbucketmigrator<0.1260.0>: 224 ok INFO REPORT <0.1262.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1262.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1262.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1262.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1262.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1262.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1262.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1262.0>: Starting to move bucket 225 vbucketmigrator<0.1262.0>: Bucket 225 moved to the next server vbucketmigrator<0.1262.0>: Validate bucket states vbucketmigrator<0.1262.0>: 225 ok INFO REPORT <0.1264.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1264.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1264.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1264.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1264.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1264.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1264.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1264.0>: Starting to move bucket 226 vbucketmigrator<0.1264.0>: Bucket 226 moved to the next server 
vbucketmigrator<0.1264.0>: Validate bucket states vbucketmigrator<0.1264.0>: 226 ok INFO REPORT <0.1266.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1266.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1266.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1266.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1266.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1266.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1266.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1266.0>: Starting to move bucket 227 vbucketmigrator<0.1266.0>: Bucket 227 moved to the next server vbucketmigrator<0.1266.0>: Validate bucket states vbucketmigrator<0.1266.0>: 227 ok INFO REPORT <0.1268.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1268.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1268.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1268.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1268.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1268.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1268.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1268.0>: Starting to move bucket 228 vbucketmigrator<0.1268.0>: Bucket 228 moved to the next server vbucketmigrator<0.1268.0>: Validate bucket states vbucketmigrator<0.1268.0>: 228 ok INFO REPORT <0.1270.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1270.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1270.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1270.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1270.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1270.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1270.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1270.0>: Starting to move bucket 229 vbucketmigrator<0.1270.0>: Bucket 229 moved to the next server vbucketmigrator<0.1270.0>: Validate bucket states vbucketmigrator<0.1270.0>: 229 ok INFO REPORT <0.1272.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1272.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1272.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1272.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1272.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1272.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1272.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1272.0>: Starting to move bucket 230 vbucketmigrator<0.1272.0>: Bucket 230 moved to the next server vbucketmigrator<0.1272.0>: Validate bucket states vbucketmigrator<0.1272.0>: 230 ok INFO REPORT <0.1274.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1274.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1274.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1274.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1274.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1274.0>: Authenticating towards: {Sock 
10.2.1.100:11210} vbucketmigrator<0.1274.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1274.0>: Starting to move bucket 231 vbucketmigrator<0.1274.0>: Bucket 231 moved to the next server vbucketmigrator<0.1274.0>: Validate bucket states vbucketmigrator<0.1274.0>: 231 ok INFO REPORT <0.1276.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1276.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1276.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1276.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1276.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1276.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1276.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1276.0>: Starting to move bucket 232 vbucketmigrator<0.1276.0>: Bucket 232 moved to the next server vbucketmigrator<0.1276.0>: Validate bucket states vbucketmigrator<0.1276.0>: 232 ok INFO REPORT <0.1278.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1278.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1278.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1278.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1278.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1278.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1278.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1278.0>: Starting to move bucket 233 vbucketmigrator<0.1278.0>: Bucket 233 moved to the next server vbucketmigrator<0.1278.0>: Validate bucket states vbucketmigrator<0.1278.0>: 233 ok INFO REPORT <0.1280.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1280.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1280.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1280.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1280.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1280.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1280.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1280.0>: Starting to move bucket 234 vbucketmigrator<0.1280.0>: Bucket 234 moved to the next server vbucketmigrator<0.1280.0>: Validate bucket states vbucketmigrator<0.1280.0>: 234 ok INFO REPORT <0.1282.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1282.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1282.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1282.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1282.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1282.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1282.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1282.0>: Starting to move bucket 235 vbucketmigrator<0.1282.0>: Bucket 235 moved to the next server vbucketmigrator<0.1282.0>: Validate bucket states vbucketmigrator<0.1282.0>: 235 ok INFO REPORT <0.1284.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1284.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1284.0>: Authenticating towards: {Sock 
10.2.1.101:11210} vbucketmigrator<0.1284.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1284.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1284.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1284.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1284.0>: Starting to move bucket 236 vbucketmigrator<0.1284.0>: Bucket 236 moved to the next server vbucketmigrator<0.1284.0>: Validate bucket states vbucketmigrator<0.1284.0>: 236 ok INFO REPORT <0.1286.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1286.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1286.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1286.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1286.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1286.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1286.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1286.0>: Starting to move bucket 237 vbucketmigrator<0.1286.0>: Bucket 237 moved to the next server vbucketmigrator<0.1286.0>: Validate bucket states vbucketmigrator<0.1286.0>: 237 ok INFO REPORT <0.1288.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1288.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1288.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1288.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1288.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1288.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1288.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1288.0>: Starting to move bucket 238 vbucketmigrator<0.1288.0>: Bucket 238 moved to the next server vbucketmigrator<0.1288.0>: Validate bucket states vbucketmigrator<0.1288.0>: 238 ok INFO REPORT <0.1290.0> 2011-01-03 12:56:42 =============================================================================== vbucketmigrator<0.1290.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1290.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1290.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1290.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1290.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1290.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1290.0>: Starting to move bucket 239 vbucketmigrator<0.1290.0>: Bucket 239 moved to the next server vbucketmigrator<0.1290.0>: Validate bucket states vbucketmigrator<0.1290.0>: 239 ok INFO REPORT <0.1292.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1292.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1292.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1292.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1292.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1292.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1292.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1292.0>: Starting to move bucket 240 vbucketmigrator<0.1292.0>: Bucket 240 moved to the next server vbucketmigrator<0.1292.0>: Validate bucket states vbucketmigrator<0.1292.0>: 240 ok INFO REPORT <0.1294.0> 2011-01-03 
12:56:43 =============================================================================== vbucketmigrator<0.1294.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1294.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1294.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1294.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1294.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1294.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1294.0>: Starting to move bucket 241 vbucketmigrator<0.1294.0>: Bucket 241 moved to the next server vbucketmigrator<0.1294.0>: Validate bucket states vbucketmigrator<0.1294.0>: 241 ok INFO REPORT <0.1296.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1296.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1296.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1296.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1296.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1296.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1296.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1296.0>: Starting to move bucket 242 vbucketmigrator<0.1296.0>: Bucket 242 moved to the next server vbucketmigrator<0.1296.0>: Validate bucket states vbucketmigrator<0.1296.0>: 242 ok INFO REPORT <0.1298.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1298.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1298.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1298.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1298.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1298.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1298.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1298.0>: Starting to move bucket 243 vbucketmigrator<0.1298.0>: Bucket 243 moved to the next server vbucketmigrator<0.1298.0>: Validate bucket states vbucketmigrator<0.1298.0>: 243 ok INFO REPORT <0.1300.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1300.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1300.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1300.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1300.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1300.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1300.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1300.0>: Starting to move bucket 244 vbucketmigrator<0.1300.0>: Bucket 244 moved to the next server vbucketmigrator<0.1300.0>: Validate bucket states vbucketmigrator<0.1300.0>: 244 ok INFO REPORT <0.1302.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1302.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1302.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1302.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1302.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1302.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1302.0>: Authenticated towards: {Sock 10.2.1.100:11210} 
vbucketmigrator<0.1302.0>: Starting to move bucket 245 vbucketmigrator<0.1302.0>: Bucket 245 moved to the next server vbucketmigrator<0.1302.0>: Validate bucket states vbucketmigrator<0.1302.0>: 245 ok INFO REPORT <0.1304.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1304.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1304.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1304.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1304.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1304.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1304.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1304.0>: Starting to move bucket 246 vbucketmigrator<0.1304.0>: Bucket 246 moved to the next server vbucketmigrator<0.1304.0>: Validate bucket states vbucketmigrator<0.1304.0>: 246 ok INFO REPORT <0.1306.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1306.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1306.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1306.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1306.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1306.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1306.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1306.0>: Starting to move bucket 247 vbucketmigrator<0.1306.0>: Bucket 247 moved to the next server vbucketmigrator<0.1306.0>: Validate bucket states vbucketmigrator<0.1306.0>: 247 ok INFO REPORT <0.1308.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1308.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1308.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1308.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1308.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1308.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1308.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1308.0>: Starting to move bucket 248 vbucketmigrator<0.1308.0>: Bucket 248 moved to the next server vbucketmigrator<0.1308.0>: Validate bucket states vbucketmigrator<0.1308.0>: 248 ok INFO REPORT <0.1310.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1310.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1310.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1310.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1310.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1310.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1310.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1310.0>: Starting to move bucket 249 vbucketmigrator<0.1310.0>: Bucket 249 moved to the next server vbucketmigrator<0.1310.0>: Validate bucket states vbucketmigrator<0.1310.0>: 249 ok INFO REPORT <0.1312.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1312.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1312.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1312.0>: Authenticated towards: {Sock 10.2.1.101:11210} 
vbucketmigrator<0.1312.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1312.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1312.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1312.0>: Starting to move bucket 250 vbucketmigrator<0.1312.0>: Bucket 250 moved to the next server vbucketmigrator<0.1312.0>: Validate bucket states vbucketmigrator<0.1312.0>: 250 ok INFO REPORT <0.1314.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1314.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1314.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1314.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1314.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1314.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1314.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1314.0>: Starting to move bucket 251 vbucketmigrator<0.1314.0>: Bucket 251 moved to the next server vbucketmigrator<0.1314.0>: Validate bucket states vbucketmigrator<0.1314.0>: 251 ok INFO REPORT <0.1316.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1316.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1316.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1316.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1316.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1316.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1316.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1316.0>: Starting to move bucket 252 vbucketmigrator<0.1316.0>: Bucket 252 moved to the next server vbucketmigrator<0.1316.0>: Validate bucket states vbucketmigrator<0.1316.0>: 252 ok INFO REPORT <0.1318.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1318.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1318.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1318.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1318.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1318.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1318.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1318.0>: Starting to move bucket 253 vbucketmigrator<0.1318.0>: Bucket 253 moved to the next server vbucketmigrator<0.1318.0>: Validate bucket states vbucketmigrator<0.1318.0>: 253 ok INFO REPORT <0.1320.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1320.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1320.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1320.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1320.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1320.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1320.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1320.0>: Starting to move bucket 254 vbucketmigrator<0.1320.0>: Bucket 254 moved to the next server vbucketmigrator<0.1320.0>: Validate bucket states vbucketmigrator<0.1320.0>: 254 ok INFO REPORT <0.1322.0> 2011-01-03 12:56:43 =============================================================================== 
vbucketmigrator<0.1322.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1322.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1322.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1322.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1322.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1322.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1322.0>: Starting to move bucket 255 vbucketmigrator<0.1322.0>: Bucket 255 moved to the next server vbucketmigrator<0.1322.0>: Validate bucket states vbucketmigrator<0.1322.0>: 255 ok INFO REPORT <0.1324.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1324.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1324.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1324.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1324.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1324.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1324.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1324.0>: Starting to move bucket 256 vbucketmigrator<0.1324.0>: Bucket 256 moved to the next server vbucketmigrator<0.1324.0>: Validate bucket states vbucketmigrator<0.1324.0>: 256 ok INFO REPORT <0.1326.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1326.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1326.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1326.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1326.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1326.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1326.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1326.0>: Starting to move bucket 257 vbucketmigrator<0.1326.0>: Bucket 257 moved to the next server vbucketmigrator<0.1326.0>: Validate bucket states vbucketmigrator<0.1326.0>: 257 ok INFO REPORT <0.1328.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1328.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1328.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1328.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1328.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1328.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1328.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1328.0>: Starting to move bucket 258 vbucketmigrator<0.1328.0>: Bucket 258 moved to the next server vbucketmigrator<0.1328.0>: Validate bucket states vbucketmigrator<0.1328.0>: 258 ok INFO REPORT <0.1330.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1330.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1330.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1330.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1330.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1330.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1330.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1330.0>: Starting to move bucket 259 vbucketmigrator<0.1330.0>: Bucket 259 moved to the next 
server vbucketmigrator<0.1330.0>: Validate bucket states vbucketmigrator<0.1330.0>: 259 ok INFO REPORT <0.1332.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1332.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1332.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1332.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1332.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1332.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1332.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1332.0>: Starting to move bucket 260 vbucketmigrator<0.1332.0>: Bucket 260 moved to the next server vbucketmigrator<0.1332.0>: Validate bucket states vbucketmigrator<0.1332.0>: 260 ok INFO REPORT <0.1334.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1334.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1334.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1334.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1334.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1334.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1334.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1334.0>: Starting to move bucket 261 vbucketmigrator<0.1334.0>: Bucket 261 moved to the next server vbucketmigrator<0.1334.0>: Validate bucket states vbucketmigrator<0.1334.0>: 261 ok INFO REPORT <0.1336.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1336.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1336.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1336.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1336.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1336.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1336.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1336.0>: Starting to move bucket 262 vbucketmigrator<0.1336.0>: Bucket 262 moved to the next server vbucketmigrator<0.1336.0>: Validate bucket states vbucketmigrator<0.1336.0>: 262 ok INFO REPORT <0.1338.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1338.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1338.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1338.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1338.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1338.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1338.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1338.0>: Starting to move bucket 263 vbucketmigrator<0.1338.0>: Bucket 263 moved to the next server vbucketmigrator<0.1338.0>: Validate bucket states vbucketmigrator<0.1338.0>: 263 ok INFO REPORT <0.1340.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1340.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1340.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1340.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1340.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1340.0>: Authenticating 
towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1340.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1340.0>: Starting to move bucket 264 vbucketmigrator<0.1340.0>: Bucket 264 moved to the next server vbucketmigrator<0.1340.0>: Validate bucket states vbucketmigrator<0.1340.0>: 264 ok INFO REPORT <0.1342.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1342.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1342.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1342.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1342.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1342.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1342.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1342.0>: Starting to move bucket 265 vbucketmigrator<0.1342.0>: Bucket 265 moved to the next server vbucketmigrator<0.1342.0>: Validate bucket states vbucketmigrator<0.1342.0>: 265 ok INFO REPORT <0.1344.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1344.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1344.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1344.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1344.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1344.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1344.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1344.0>: Starting to move bucket 266 vbucketmigrator<0.1344.0>: Bucket 266 moved to the next server vbucketmigrator<0.1344.0>: Validate bucket states vbucketmigrator<0.1344.0>: 266 ok INFO REPORT <0.1346.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1346.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1346.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1346.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1346.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1346.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1346.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1346.0>: Starting to move bucket 267 vbucketmigrator<0.1346.0>: Bucket 267 moved to the next server vbucketmigrator<0.1346.0>: Validate bucket states vbucketmigrator<0.1346.0>: 267 ok INFO REPORT <0.1348.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1348.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1348.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1348.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1348.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1348.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1348.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1348.0>: Starting to move bucket 268 vbucketmigrator<0.1348.0>: Bucket 268 moved to the next server vbucketmigrator<0.1348.0>: Validate bucket states vbucketmigrator<0.1348.0>: 268 ok INFO REPORT <0.1350.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1350.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1350.0>: Authenticating 
towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1350.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1350.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1350.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1350.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1350.0>: Starting to move bucket 269 vbucketmigrator<0.1350.0>: Bucket 269 moved to the next server vbucketmigrator<0.1350.0>: Validate bucket states vbucketmigrator<0.1350.0>: 269 ok INFO REPORT <0.1352.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1352.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1352.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1352.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1352.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1352.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1352.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1352.0>: Starting to move bucket 270 vbucketmigrator<0.1352.0>: Bucket 270 moved to the next server vbucketmigrator<0.1352.0>: Validate bucket states vbucketmigrator<0.1352.0>: 270 ok INFO REPORT <0.1354.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1354.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1354.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1354.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1354.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1354.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1354.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1354.0>: Starting to move bucket 271 vbucketmigrator<0.1354.0>: Bucket 271 moved to the next server vbucketmigrator<0.1354.0>: Validate bucket states vbucketmigrator<0.1354.0>: 271 ok INFO REPORT <0.1356.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1356.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1356.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1356.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1356.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1356.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1356.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1356.0>: Starting to move bucket 272 vbucketmigrator<0.1356.0>: Bucket 272 moved to the next server vbucketmigrator<0.1356.0>: Validate bucket states vbucketmigrator<0.1356.0>: 272 ok INFO REPORT <0.1358.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1358.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1358.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1358.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1358.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1358.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1358.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1358.0>: Starting to move bucket 273 vbucketmigrator<0.1358.0>: Bucket 273 moved to the next server vbucketmigrator<0.1358.0>: Validate bucket states vbucketmigrator<0.1358.0>: 273 ok INFO REPORT 
<0.1360.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1360.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1360.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1360.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1360.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1360.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1360.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1360.0>: Starting to move bucket 274 vbucketmigrator<0.1360.0>: Bucket 274 moved to the next server vbucketmigrator<0.1360.0>: Validate bucket states vbucketmigrator<0.1360.0>: 274 ok INFO REPORT <0.1362.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1362.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1362.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1362.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1362.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1362.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1362.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1362.0>: Starting to move bucket 275 vbucketmigrator<0.1362.0>: Bucket 275 moved to the next server vbucketmigrator<0.1362.0>: Validate bucket states vbucketmigrator<0.1362.0>: 275 ok INFO REPORT <0.1364.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1364.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1364.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1364.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1364.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1364.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1364.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1364.0>: Starting to move bucket 276 vbucketmigrator<0.1364.0>: Bucket 276 moved to the next server vbucketmigrator<0.1364.0>: Validate bucket states vbucketmigrator<0.1364.0>: 276 ok INFO REPORT <0.1366.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1366.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1366.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1366.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1366.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1366.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1366.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1366.0>: Starting to move bucket 277 vbucketmigrator<0.1366.0>: Bucket 277 moved to the next server vbucketmigrator<0.1366.0>: Validate bucket states vbucketmigrator<0.1366.0>: 277 ok INFO REPORT <0.1371.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1371.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1371.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1371.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1371.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1371.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1371.0>: Authenticated towards: {Sock 10.2.1.100:11210} 
vbucketmigrator<0.1371.0>: Starting to move bucket 278 vbucketmigrator<0.1371.0>: Bucket 278 moved to the next server vbucketmigrator<0.1371.0>: Validate bucket states vbucketmigrator<0.1371.0>: 278 ok INFO REPORT <0.1373.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1373.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1373.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1373.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1373.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1373.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1373.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1373.0>: Starting to move bucket 279 vbucketmigrator<0.1373.0>: Bucket 279 moved to the next server vbucketmigrator<0.1373.0>: Validate bucket states vbucketmigrator<0.1373.0>: 279 ok INFO REPORT <0.1375.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1375.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1375.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1375.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1375.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1375.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1375.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1375.0>: Starting to move bucket 280 vbucketmigrator<0.1375.0>: Bucket 280 moved to the next server vbucketmigrator<0.1375.0>: Validate bucket states vbucketmigrator<0.1375.0>: 280 ok INFO REPORT <0.1379.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1379.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1379.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1379.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1379.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1379.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1379.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1379.0>: Starting to move bucket 281 vbucketmigrator<0.1379.0>: Bucket 281 moved to the next server vbucketmigrator<0.1379.0>: Validate bucket states vbucketmigrator<0.1379.0>: 281 ok INFO REPORT <0.1381.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1381.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1381.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1381.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1381.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1381.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1381.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1381.0>: Starting to move bucket 282 vbucketmigrator<0.1381.0>: Bucket 282 moved to the next server vbucketmigrator<0.1381.0>: Validate bucket states vbucketmigrator<0.1381.0>: 282 ok INFO REPORT <0.1383.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1383.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1383.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1383.0>: Authenticated towards: {Sock 10.2.1.101:11210} 
vbucketmigrator<0.1383.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1383.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1383.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1383.0>: Starting to move bucket 283 vbucketmigrator<0.1383.0>: Bucket 283 moved to the next server vbucketmigrator<0.1383.0>: Validate bucket states vbucketmigrator<0.1383.0>: 283 ok INFO REPORT <0.1385.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1385.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1385.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1385.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1385.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1385.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1385.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1385.0>: Starting to move bucket 284 vbucketmigrator<0.1385.0>: Bucket 284 moved to the next server vbucketmigrator<0.1385.0>: Validate bucket states vbucketmigrator<0.1385.0>: 284 ok INFO REPORT <0.1387.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1387.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1387.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1387.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1387.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1387.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1387.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1387.0>: Starting to move bucket 285 vbucketmigrator<0.1387.0>: Bucket 285 moved to the next server vbucketmigrator<0.1387.0>: Validate bucket states vbucketmigrator<0.1387.0>: 285 ok INFO REPORT <0.1389.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1389.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1389.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1389.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1389.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1389.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1389.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1389.0>: Starting to move bucket 286 vbucketmigrator<0.1389.0>: Bucket 286 moved to the next server vbucketmigrator<0.1389.0>: Validate bucket states vbucketmigrator<0.1389.0>: 286 ok INFO REPORT <0.1391.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1391.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1391.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1391.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1391.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1391.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1391.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1391.0>: Starting to move bucket 287 vbucketmigrator<0.1391.0>: Bucket 287 moved to the next server vbucketmigrator<0.1391.0>: Validate bucket states vbucketmigrator<0.1391.0>: 287 ok INFO REPORT <0.1393.0> 2011-01-03 12:56:43 =============================================================================== 
vbucketmigrator<0.1393.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1393.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1393.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1393.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1393.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1393.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1393.0>: Starting to move bucket 288 vbucketmigrator<0.1393.0>: Bucket 288 moved to the next server vbucketmigrator<0.1393.0>: Validate bucket states vbucketmigrator<0.1393.0>: 288 ok INFO REPORT <0.1395.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1395.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1395.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1395.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1395.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1395.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1395.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1395.0>: Starting to move bucket 289 vbucketmigrator<0.1395.0>: Bucket 289 moved to the next server vbucketmigrator<0.1395.0>: Validate bucket states vbucketmigrator<0.1395.0>: 289 ok INFO REPORT <0.1397.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1397.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1397.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1397.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1397.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1397.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1397.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1397.0>: Starting to move bucket 290 vbucketmigrator<0.1397.0>: Bucket 290 moved to the next server vbucketmigrator<0.1397.0>: Validate bucket states vbucketmigrator<0.1397.0>: 290 ok INFO REPORT <0.1399.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1399.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1399.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1399.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1399.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1399.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1399.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1399.0>: Starting to move bucket 291 vbucketmigrator<0.1399.0>: Bucket 291 moved to the next server vbucketmigrator<0.1399.0>: Validate bucket states vbucketmigrator<0.1399.0>: 291 ok INFO REPORT <0.1401.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1401.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1401.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1401.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1401.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1401.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1401.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1401.0>: Starting to move bucket 292 vbucketmigrator<0.1401.0>: Bucket 292 moved to the next 
server vbucketmigrator<0.1401.0>: Validate bucket states vbucketmigrator<0.1401.0>: 292 ok INFO REPORT <0.1403.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1403.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1403.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1403.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1403.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1403.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1403.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1403.0>: Starting to move bucket 293 vbucketmigrator<0.1403.0>: Bucket 293 moved to the next server vbucketmigrator<0.1403.0>: Validate bucket states vbucketmigrator<0.1403.0>: 293 ok INFO REPORT <0.1405.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1405.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1405.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1405.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1405.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1405.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1405.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1405.0>: Starting to move bucket 294 vbucketmigrator<0.1405.0>: Bucket 294 moved to the next server vbucketmigrator<0.1405.0>: Validate bucket states vbucketmigrator<0.1405.0>: 294 ok INFO REPORT <0.1407.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1407.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1407.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1407.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1407.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1407.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1407.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1407.0>: Starting to move bucket 295 vbucketmigrator<0.1407.0>: Bucket 295 moved to the next server vbucketmigrator<0.1407.0>: Validate bucket states vbucketmigrator<0.1407.0>: 295 ok INFO REPORT <0.1409.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1409.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1409.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1409.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1409.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1409.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1409.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1409.0>: Starting to move bucket 296 vbucketmigrator<0.1409.0>: Bucket 296 moved to the next server vbucketmigrator<0.1409.0>: Validate bucket states vbucketmigrator<0.1409.0>: 296 ok INFO REPORT <0.1411.0> 2011-01-03 12:56:43 =============================================================================== vbucketmigrator<0.1411.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1411.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1411.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1411.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1411.0>: Authenticating 
vbucketmigrator<0.1411.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1411.0>: Starting to move bucket 297
vbucketmigrator<0.1411.0>: Bucket 297 moved to the next server
vbucketmigrator<0.1411.0>: Validate bucket states
vbucketmigrator<0.1411.0>: 297 ok
INFO REPORT <0.1413.0> 2011-01-03 12:56:43
===============================================================================
vbucketmigrator<0.1413.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1413.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1413.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1413.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1413.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1413.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1413.0>: Starting to move bucket 298
vbucketmigrator<0.1413.0>: Bucket 298 moved to the next server
vbucketmigrator<0.1413.0>: Validate bucket states
vbucketmigrator<0.1413.0>: 298 ok
INFO REPORT <0.1415.0> 2011-01-03 12:56:43
===============================================================================
vbucketmigrator<0.1415.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1415.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1415.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1415.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1415.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1415.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1415.0>: Starting to move bucket 299
vbucketmigrator<0.1415.0>: Bucket 299 moved to the next server
vbucketmigrator<0.1415.0>: Validate bucket states
vbucketmigrator<0.1415.0>: 299 ok
INFO REPORT <0.1417.0> 2011-01-03 12:56:43
===============================================================================
vbucketmigrator<0.1417.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1417.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1417.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1417.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1417.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1417.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1417.0>: Starting to move bucket 300
vbucketmigrator<0.1417.0>: Bucket 300 moved to the next server
vbucketmigrator<0.1417.0>: Validate bucket states
vbucketmigrator<0.1417.0>: 300 ok
INFO REPORT <0.1419.0> 2011-01-03 12:56:43
===============================================================================
vbucketmigrator<0.1419.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1419.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1419.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1419.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1419.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1419.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1419.0>: Starting to move bucket 301
vbucketmigrator<0.1419.0>: Bucket 301 moved to the next server
vbucketmigrator<0.1419.0>: Validate bucket states
vbucketmigrator<0.1419.0>: 301 ok
INFO REPORT <0.1421.0> 2011-01-03 12:56:43
===============================================================================
vbucketmigrator<0.1421.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1421.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1421.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1421.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1421.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1421.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1421.0>: Starting to move bucket 302
vbucketmigrator<0.1421.0>: Bucket 302 moved to the next server
vbucketmigrator<0.1421.0>: Validate bucket states
vbucketmigrator<0.1421.0>: 302 ok
INFO REPORT <0.1423.0> 2011-01-03 12:56:43
===============================================================================
vbucketmigrator<0.1423.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1423.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1423.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1423.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1423.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1423.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1423.0>: Starting to move bucket 303
vbucketmigrator<0.1423.0>: Bucket 303 moved to the next server
vbucketmigrator<0.1423.0>: Validate bucket states
vbucketmigrator<0.1423.0>: 303 ok
INFO REPORT <0.1425.0> 2011-01-03 12:56:43
===============================================================================
vbucketmigrator<0.1425.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1425.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1425.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1425.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1425.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1425.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1425.0>: Starting to move bucket 304
vbucketmigrator<0.1425.0>: Bucket 304 moved to the next server
vbucketmigrator<0.1425.0>: Validate bucket states
vbucketmigrator<0.1425.0>: 304 ok
INFO REPORT <0.1429.0> 2011-01-03 12:56:43
===============================================================================
vbucketmigrator<0.1429.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1429.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1429.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1429.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1429.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1429.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1429.0>: Starting to move bucket 305
vbucketmigrator<0.1429.0>: Bucket 305 moved to the next server
vbucketmigrator<0.1429.0>: Validate bucket states
vbucketmigrator<0.1429.0>: 305 ok
INFO REPORT <0.1431.0> 2011-01-03 12:56:43
===============================================================================
vbucketmigrator<0.1431.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1431.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1431.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1431.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1431.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1431.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1431.0>: Starting to move bucket 306
vbucketmigrator<0.1431.0>: Bucket 306 moved to the next server
vbucketmigrator<0.1431.0>: Validate bucket states
vbucketmigrator<0.1431.0>: 306 ok
INFO REPORT <0.1433.0> 2011-01-03 12:56:43
===============================================================================
vbucketmigrator<0.1433.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1433.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1433.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1433.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1433.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1433.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1433.0>: Starting to move bucket 307
vbucketmigrator<0.1433.0>: Bucket 307 moved to the next server
vbucketmigrator<0.1433.0>: Validate bucket states
vbucketmigrator<0.1433.0>: 307 ok
INFO REPORT <0.1435.0> 2011-01-03 12:56:43
===============================================================================
vbucketmigrator<0.1435.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1435.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1435.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1435.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1435.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1435.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1435.0>: Starting to move bucket 308
vbucketmigrator<0.1435.0>: Bucket 308 moved to the next server
vbucketmigrator<0.1435.0>: Validate bucket states
vbucketmigrator<0.1435.0>: 308 ok
INFO REPORT <0.1437.0> 2011-01-03 12:56:43
===============================================================================
vbucketmigrator<0.1437.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1437.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1437.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1437.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1437.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1437.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1437.0>: Starting to move bucket 309
vbucketmigrator<0.1437.0>: Bucket 309 moved to the next server
vbucketmigrator<0.1437.0>: Validate bucket states
vbucketmigrator<0.1437.0>: 309 ok
INFO REPORT <0.1439.0> 2011-01-03 12:56:43
===============================================================================
vbucketmigrator<0.1439.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1439.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1439.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1439.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1439.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1439.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1439.0>: Starting to move bucket 310
vbucketmigrator<0.1439.0>: Bucket 310 moved to the next server
vbucketmigrator<0.1439.0>: Validate bucket states
vbucketmigrator<0.1439.0>: 310 ok
INFO REPORT <0.1441.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1441.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1441.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1441.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1441.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1441.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1441.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1441.0>: Starting to move bucket 311
vbucketmigrator<0.1441.0>: Bucket 311 moved to the next server
vbucketmigrator<0.1441.0>: Validate bucket states
vbucketmigrator<0.1441.0>: 311 ok
INFO REPORT <0.1443.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1443.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1443.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1443.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1443.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1443.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1443.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1443.0>: Starting to move bucket 312
vbucketmigrator<0.1443.0>: Bucket 312 moved to the next server
vbucketmigrator<0.1443.0>: Validate bucket states
vbucketmigrator<0.1443.0>: 312 ok
INFO REPORT <0.1445.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1445.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1445.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1445.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1445.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1445.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1445.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1445.0>: Starting to move bucket 313
vbucketmigrator<0.1445.0>: Bucket 313 moved to the next server
vbucketmigrator<0.1445.0>: Validate bucket states
vbucketmigrator<0.1445.0>: 313 ok
INFO REPORT <0.1447.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1447.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1447.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1447.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1447.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1447.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1447.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1447.0>: Starting to move bucket 314
vbucketmigrator<0.1447.0>: Bucket 314 moved to the next server
vbucketmigrator<0.1447.0>: Validate bucket states
vbucketmigrator<0.1447.0>: 314 ok
INFO REPORT <0.1449.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1449.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1449.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1449.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1449.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1449.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1449.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1449.0>: Starting to move bucket 315
vbucketmigrator<0.1449.0>: Bucket 315 moved to the next server
vbucketmigrator<0.1449.0>: Validate bucket states
vbucketmigrator<0.1449.0>: 315 ok
INFO REPORT <0.1451.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1451.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1451.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1451.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1451.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1451.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1451.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1451.0>: Starting to move bucket 316
vbucketmigrator<0.1451.0>: Bucket 316 moved to the next server
vbucketmigrator<0.1451.0>: Validate bucket states
vbucketmigrator<0.1451.0>: 316 ok
INFO REPORT <0.1453.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1453.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1453.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1453.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1453.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1453.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1453.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1453.0>: Starting to move bucket 317
vbucketmigrator<0.1453.0>: Bucket 317 moved to the next server
vbucketmigrator<0.1453.0>: Validate bucket states
vbucketmigrator<0.1453.0>: 317 ok
ERROR REPORT <0.1426.0> 2011-01-03 12:56:44
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.1455.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1455.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1455.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1455.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1455.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1455.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1455.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1455.0>: Starting to move bucket 318
vbucketmigrator<0.1455.0>: Bucket 318 moved to the next server
vbucketmigrator<0.1455.0>: Validate bucket states
vbucketmigrator<0.1455.0>: 318 ok
INFO REPORT <0.1457.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1457.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1457.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1457.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1457.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1457.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1457.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1457.0>: Starting to move bucket 319
vbucketmigrator<0.1457.0>: Bucket 319 moved to the next server
vbucketmigrator<0.1457.0>: Validate bucket states
vbucketmigrator<0.1457.0>: 319 ok
INFO REPORT <0.1459.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1459.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1459.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1459.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1459.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1459.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1459.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1459.0>: Starting to move bucket 320
vbucketmigrator<0.1459.0>: Bucket 320 moved to the next server
vbucketmigrator<0.1459.0>: Validate bucket states
vbucketmigrator<0.1459.0>: 320 ok
INFO REPORT <0.1461.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1461.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1461.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1461.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1461.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1461.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1461.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1461.0>: Starting to move bucket 321
vbucketmigrator<0.1461.0>: Bucket 321 moved to the next server
vbucketmigrator<0.1461.0>: Validate bucket states
vbucketmigrator<0.1461.0>: 321 ok
INFO REPORT <0.1463.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1463.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1463.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1463.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1463.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1463.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1463.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1463.0>: Starting to move bucket 322
vbucketmigrator<0.1463.0>: Bucket 322 moved to the next server
vbucketmigrator<0.1463.0>: Validate bucket states
vbucketmigrator<0.1463.0>: 322 ok
INFO REPORT <0.1465.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1465.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1465.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1465.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1465.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1465.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1465.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1465.0>: Starting to move bucket 323
vbucketmigrator<0.1465.0>: Bucket 323 moved to the next server
vbucketmigrator<0.1465.0>: Validate bucket states
vbucketmigrator<0.1465.0>: 323 ok
INFO REPORT <0.1467.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1467.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1467.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1467.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1467.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1467.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1467.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1467.0>: Starting to move bucket 324
vbucketmigrator<0.1467.0>: Bucket 324 moved to the next server
vbucketmigrator<0.1467.0>: Validate bucket states
vbucketmigrator<0.1467.0>: 324 ok
INFO REPORT <0.1469.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1469.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1469.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1469.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1469.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1469.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1469.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1469.0>: Starting to move bucket 325
vbucketmigrator<0.1469.0>: Bucket 325 moved to the next server
vbucketmigrator<0.1469.0>: Validate bucket states
vbucketmigrator<0.1469.0>: 325 ok
INFO REPORT <0.1471.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1471.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1471.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1471.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1471.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1471.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1471.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1471.0>: Starting to move bucket 326
vbucketmigrator<0.1471.0>: Bucket 326 moved to the next server
vbucketmigrator<0.1471.0>: Validate bucket states
vbucketmigrator<0.1471.0>: 326 ok
INFO REPORT <0.1473.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1473.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1473.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1473.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1473.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1473.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1473.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1473.0>: Starting to move bucket 327
vbucketmigrator<0.1473.0>: Bucket 327 moved to the next server
vbucketmigrator<0.1473.0>: Validate bucket states
vbucketmigrator<0.1473.0>: 327 ok
INFO REPORT <0.1475.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1475.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1475.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1475.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1475.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1475.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1475.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1475.0>: Starting to move bucket 328
vbucketmigrator<0.1475.0>: Bucket 328 moved to the next server
vbucketmigrator<0.1475.0>: Validate bucket states
vbucketmigrator<0.1475.0>: 328 ok
INFO REPORT <0.1477.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1477.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1477.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1477.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1477.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1477.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1477.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1477.0>: Starting to move bucket 329
vbucketmigrator<0.1477.0>: Bucket 329 moved to the next server
vbucketmigrator<0.1477.0>: Validate bucket states
vbucketmigrator<0.1477.0>: 329 ok
INFO REPORT <0.1479.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1479.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1479.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1479.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1479.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1479.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1479.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1479.0>: Starting to move bucket 330
vbucketmigrator<0.1479.0>: Bucket 330 moved to the next server
vbucketmigrator<0.1479.0>: Validate bucket states
vbucketmigrator<0.1479.0>: 330 ok
INFO REPORT <0.1481.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1481.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1481.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1481.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1481.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1481.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1481.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1481.0>: Starting to move bucket 331
vbucketmigrator<0.1481.0>: Bucket 331 moved to the next server
vbucketmigrator<0.1481.0>: Validate bucket states
vbucketmigrator<0.1481.0>: 331 ok
INFO REPORT <0.1483.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1483.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1483.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1483.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1483.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1483.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1483.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1483.0>: Starting to move bucket 332
vbucketmigrator<0.1483.0>: Bucket 332 moved to the next server
vbucketmigrator<0.1483.0>: Validate bucket states
vbucketmigrator<0.1483.0>: 332 ok
INFO REPORT <0.1485.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1485.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1485.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1485.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1485.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1485.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1485.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1485.0>: Starting to move bucket 333
vbucketmigrator<0.1485.0>: Bucket 333 moved to the next server
vbucketmigrator<0.1485.0>: Validate bucket states
vbucketmigrator<0.1485.0>: 333 ok
INFO REPORT <0.1487.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1487.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1487.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1487.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1487.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1487.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1487.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1487.0>: Starting to move bucket 334
vbucketmigrator<0.1487.0>: Bucket 334 moved to the next server
vbucketmigrator<0.1487.0>: Validate bucket states
vbucketmigrator<0.1487.0>: 334 ok
INFO REPORT <0.1489.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1489.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1489.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1489.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1489.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1489.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1489.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1489.0>: Starting to move bucket 335
vbucketmigrator<0.1489.0>: Bucket 335 moved to the next server
vbucketmigrator<0.1489.0>: Validate bucket states
vbucketmigrator<0.1489.0>: 335 ok
INFO REPORT <0.1491.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1491.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1491.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1491.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1491.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1491.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1491.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1491.0>: Starting to move bucket 336
vbucketmigrator<0.1491.0>: Bucket 336 moved to the next server
vbucketmigrator<0.1491.0>: Validate bucket states
vbucketmigrator<0.1491.0>: 336 ok
INFO REPORT <0.1493.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1493.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1493.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1493.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1493.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1493.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1493.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1493.0>: Starting to move bucket 337
vbucketmigrator<0.1493.0>: Bucket 337 moved to the next server
vbucketmigrator<0.1493.0>: Validate bucket states
vbucketmigrator<0.1493.0>: 337 ok
INFO REPORT <0.1495.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1495.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1495.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1495.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1495.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1495.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1495.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1495.0>: Starting to move bucket 338
vbucketmigrator<0.1495.0>: Bucket 338 moved to the next server
vbucketmigrator<0.1495.0>: Validate bucket states
vbucketmigrator<0.1495.0>: 338 ok
INFO REPORT <0.1497.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1497.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1497.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1497.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1497.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1497.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1497.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1497.0>: Starting to move bucket 339
vbucketmigrator<0.1497.0>: Bucket 339 moved to the next server
vbucketmigrator<0.1497.0>: Validate bucket states
vbucketmigrator<0.1497.0>: 339 ok
INFO REPORT <0.1499.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1499.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1499.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1499.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1499.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1499.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1499.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1499.0>: Starting to move bucket 340
vbucketmigrator<0.1499.0>: Bucket 340 moved to the next server
vbucketmigrator<0.1499.0>: Validate bucket states
vbucketmigrator<0.1499.0>: 340 ok
INFO REPORT <0.1501.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1501.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1501.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1501.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1501.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1501.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1501.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1501.0>: Starting to move bucket 341
vbucketmigrator<0.1501.0>: Bucket 341 moved to the next server
vbucketmigrator<0.1501.0>: Validate bucket states
vbucketmigrator<0.1501.0>: 341 ok
INFO REPORT <0.1503.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1503.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1503.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1503.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1503.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1503.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1503.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1503.0>: Starting to move bucket 342
vbucketmigrator<0.1503.0>: Bucket 342 moved to the next server
vbucketmigrator<0.1503.0>: Validate bucket states
vbucketmigrator<0.1503.0>: 342 ok
INFO REPORT <0.1505.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1505.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1505.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1505.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1505.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1505.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1505.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1505.0>: Starting to move bucket 343
vbucketmigrator<0.1505.0>: Bucket 343 moved to the next server
vbucketmigrator<0.1505.0>: Validate bucket states
vbucketmigrator<0.1505.0>: 343 ok
INFO REPORT <0.1507.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1507.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1507.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1507.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1507.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1507.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1507.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1507.0>: Starting to move bucket 344
vbucketmigrator<0.1507.0>: Bucket 344 moved to the next server
vbucketmigrator<0.1507.0>: Validate bucket states
vbucketmigrator<0.1507.0>: 344 ok
INFO REPORT <0.1509.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1509.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1509.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1509.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1509.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1509.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1509.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1509.0>: Starting to move bucket 345
vbucketmigrator<0.1509.0>: Bucket 345 moved to the next server
vbucketmigrator<0.1509.0>: Validate bucket states
vbucketmigrator<0.1509.0>: 345 ok
INFO REPORT <0.1511.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1511.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1511.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1511.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1511.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1511.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1511.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1511.0>: Starting to move bucket 346
vbucketmigrator<0.1511.0>: Bucket 346 moved to the next server
vbucketmigrator<0.1511.0>: Validate bucket states
vbucketmigrator<0.1511.0>: 346 ok
INFO REPORT <0.1513.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1513.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1513.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1513.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1513.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1513.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1513.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1513.0>: Starting to move bucket 347
vbucketmigrator<0.1513.0>: Bucket 347 moved to the next server
vbucketmigrator<0.1513.0>: Validate bucket states
vbucketmigrator<0.1513.0>: 347 ok
INFO REPORT <0.1515.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1515.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1515.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1515.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1515.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1515.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1515.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1515.0>: Starting to move bucket 348
vbucketmigrator<0.1515.0>: Bucket 348 moved to the next server
vbucketmigrator<0.1515.0>: Validate bucket states
vbucketmigrator<0.1515.0>: 348 ok
INFO REPORT <0.1517.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1517.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1517.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1517.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1517.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1517.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1517.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1517.0>: Starting to move bucket 349
vbucketmigrator<0.1517.0>: Bucket 349 moved to the next server
vbucketmigrator<0.1517.0>: Validate bucket states
vbucketmigrator<0.1517.0>: 349 ok
INFO REPORT <0.1519.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1519.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1519.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1519.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1519.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1519.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1519.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1519.0>: Starting to move bucket 350
vbucketmigrator<0.1519.0>: Bucket 350 moved to the next server
vbucketmigrator<0.1519.0>: Validate bucket states
vbucketmigrator<0.1519.0>: 350 ok
INFO REPORT <0.1521.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1521.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1521.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1521.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1521.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1521.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1521.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1521.0>: Starting to move bucket 351
vbucketmigrator<0.1521.0>: Bucket 351 moved to the next server
vbucketmigrator<0.1521.0>: Validate bucket states
vbucketmigrator<0.1521.0>: 351 ok
INFO REPORT <0.1523.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1523.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1523.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1523.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1523.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1523.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1523.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1523.0>: Starting to move bucket 352
vbucketmigrator<0.1523.0>: Bucket 352 moved to the next server
vbucketmigrator<0.1523.0>: Validate bucket states
vbucketmigrator<0.1523.0>: 352 ok
INFO REPORT <0.1525.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1525.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1525.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1525.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1525.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1525.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1525.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1525.0>: Starting to move bucket 353
vbucketmigrator<0.1525.0>: Bucket 353 moved to the next server
vbucketmigrator<0.1525.0>: Validate bucket states
vbucketmigrator<0.1525.0>: 353 ok
INFO REPORT <0.1527.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1527.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1527.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1527.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1527.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1527.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1527.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1527.0>: Starting to move bucket 354
vbucketmigrator<0.1527.0>: Bucket 354 moved to the next server
vbucketmigrator<0.1527.0>: Validate bucket states
vbucketmigrator<0.1527.0>: 354 ok
INFO REPORT <0.1529.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1529.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1529.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1529.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1529.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1529.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1529.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1529.0>: Starting to move bucket 355
vbucketmigrator<0.1529.0>: Bucket 355 moved to the next server
vbucketmigrator<0.1529.0>: Validate bucket states
vbucketmigrator<0.1529.0>: 355 ok
INFO REPORT <0.1531.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1531.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1531.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1531.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1531.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1531.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1531.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1531.0>: Starting to move bucket 356
vbucketmigrator<0.1531.0>: Bucket 356 moved to the next server
vbucketmigrator<0.1531.0>: Validate bucket states
vbucketmigrator<0.1531.0>: 356 ok
INFO REPORT <0.1533.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1533.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1533.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1533.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1533.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1533.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1533.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1533.0>: Starting to move bucket 357
vbucketmigrator<0.1533.0>: Bucket 357 moved to the next server
vbucketmigrator<0.1533.0>: Validate bucket states
vbucketmigrator<0.1533.0>: 357 ok
INFO REPORT <0.1535.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1535.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1535.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1535.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1535.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1535.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1535.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1535.0>: Starting to move bucket 358
vbucketmigrator<0.1535.0>: Bucket 358 moved to the next server
vbucketmigrator<0.1535.0>: Validate bucket states
vbucketmigrator<0.1535.0>: 358 ok
INFO REPORT <0.1537.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1537.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1537.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1537.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1537.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1537.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1537.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1537.0>: Starting to move bucket 359
vbucketmigrator<0.1537.0>: Bucket 359 moved to the next server
vbucketmigrator<0.1537.0>: Validate bucket states
vbucketmigrator<0.1537.0>: 359 ok
INFO REPORT <0.1539.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1539.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1539.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1539.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1539.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1539.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1539.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1539.0>: Starting to move bucket 360
vbucketmigrator<0.1539.0>: Bucket 360 moved to the next server
vbucketmigrator<0.1539.0>: Validate bucket states
vbucketmigrator<0.1539.0>: 360 ok
INFO REPORT <0.1541.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1541.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1541.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1541.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1541.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1541.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1541.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1541.0>: Starting to move bucket 361
vbucketmigrator<0.1541.0>: Bucket 361 moved to the next server
vbucketmigrator<0.1541.0>: Validate bucket states
vbucketmigrator<0.1541.0>: 361 ok
INFO REPORT <0.1543.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1543.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1543.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1543.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1543.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1543.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1543.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1543.0>: Starting to move bucket 362
vbucketmigrator<0.1543.0>: Bucket 362 moved to the next server
vbucketmigrator<0.1543.0>: Validate bucket states
vbucketmigrator<0.1543.0>: 362 ok
INFO REPORT <0.1545.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1545.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1545.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1545.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1545.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1545.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1545.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1545.0>: Starting to move bucket 363
vbucketmigrator<0.1545.0>: Bucket 363 moved to the next server
vbucketmigrator<0.1545.0>: Validate bucket states
vbucketmigrator<0.1545.0>: 363 ok
INFO REPORT <0.1547.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1547.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1547.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1547.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1547.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1547.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1547.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1547.0>: Starting to move bucket 364
vbucketmigrator<0.1547.0>: Bucket 364 moved to the next server
vbucketmigrator<0.1547.0>: Validate bucket states
vbucketmigrator<0.1547.0>: 364 ok
INFO REPORT <0.1549.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1549.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1549.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1549.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1549.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1549.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1549.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1549.0>: Starting to move bucket 365
vbucketmigrator<0.1549.0>: Bucket 365 moved to the next server
vbucketmigrator<0.1549.0>: Validate bucket states
vbucketmigrator<0.1549.0>: 365 ok
INFO REPORT <0.1551.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1551.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1551.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1551.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1551.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1551.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1551.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1551.0>: Starting to move bucket 366
vbucketmigrator<0.1551.0>: Bucket 366 moved to the next server
vbucketmigrator<0.1551.0>: Validate bucket states
vbucketmigrator<0.1551.0>: 366 ok
INFO REPORT <0.1553.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1553.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1553.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1553.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1553.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1553.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1553.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1553.0>: Starting to move bucket 367
vbucketmigrator<0.1553.0>: Bucket 367 moved to the next server
vbucketmigrator<0.1553.0>: Validate bucket states
vbucketmigrator<0.1553.0>: 367 ok
INFO REPORT <0.1555.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1555.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1555.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1555.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1555.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1555.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1555.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1555.0>: Starting to move bucket 368
vbucketmigrator<0.1555.0>: Bucket 368 moved to the next server
vbucketmigrator<0.1555.0>: Validate bucket states
vbucketmigrator<0.1555.0>: 368 ok
INFO REPORT <0.1557.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1557.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1557.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1557.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1557.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1557.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1557.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1557.0>: Starting to move bucket 369
vbucketmigrator<0.1557.0>: Bucket 369 moved to the next server
vbucketmigrator<0.1557.0>: Validate bucket states
vbucketmigrator<0.1557.0>: 369 ok
INFO REPORT <0.1559.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1559.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1559.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1559.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1559.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1559.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1559.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1559.0>: Starting to move bucket 370
vbucketmigrator<0.1559.0>: Bucket 370 moved to the next server
vbucketmigrator<0.1559.0>: Validate bucket states
vbucketmigrator<0.1559.0>: 370 ok
INFO REPORT <0.1561.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1561.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1561.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1561.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1561.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1561.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1561.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1561.0>: Starting to move bucket 371
vbucketmigrator<0.1561.0>: Bucket 371 moved to the next server
vbucketmigrator<0.1561.0>: Validate bucket states
vbucketmigrator<0.1561.0>: 371 ok
INFO REPORT <0.1563.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1563.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1563.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1563.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1563.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1563.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1563.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1563.0>: Starting to move bucket 372
vbucketmigrator<0.1563.0>: Bucket 372 moved to the next server
vbucketmigrator<0.1563.0>: Validate bucket states
vbucketmigrator<0.1563.0>: 372 ok
INFO REPORT <0.1565.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1565.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1565.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1565.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1565.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1565.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1565.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1565.0>: Starting to move bucket 373
vbucketmigrator<0.1565.0>: Bucket 373 moved to the next server
vbucketmigrator<0.1565.0>: Validate bucket states
vbucketmigrator<0.1565.0>: 373 ok
INFO REPORT <0.1567.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1567.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1567.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1567.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1567.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1567.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1567.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1567.0>: Starting to move bucket 374
vbucketmigrator<0.1567.0>: Bucket 374 moved to the next server
vbucketmigrator<0.1567.0>: Validate bucket states
vbucketmigrator<0.1567.0>: 374 ok
INFO REPORT <0.1569.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1569.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1569.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1569.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1569.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1569.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1569.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1569.0>: Starting to move bucket 375
vbucketmigrator<0.1569.0>: Bucket 375 moved to the next server
vbucketmigrator<0.1569.0>: Validate bucket states
vbucketmigrator<0.1569.0>: 375 ok
INFO REPORT <0.1571.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1571.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1571.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1571.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1571.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1571.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1571.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1571.0>: Starting to move bucket 376
vbucketmigrator<0.1571.0>: Bucket 376 moved to the next server
vbucketmigrator<0.1571.0>: Validate bucket states
vbucketmigrator<0.1571.0>: 376 ok
INFO REPORT <0.1573.0> 2011-01-03 12:56:44
===============================================================================
vbucketmigrator<0.1573.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1573.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1573.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1573.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1573.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1573.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1573.0>: Starting to move bucket 377 vbucketmigrator<0.1573.0>: Bucket 377 moved to the next server vbucketmigrator<0.1573.0>: Validate bucket states vbucketmigrator<0.1573.0>: 377 ok INFO REPORT <0.1575.0> 2011-01-03 12:56:44 =============================================================================== vbucketmigrator<0.1575.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1575.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1575.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1575.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1575.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1575.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1575.0>: Starting to move bucket 378 vbucketmigrator<0.1575.0>: Bucket 378 moved to the next server vbucketmigrator<0.1575.0>: Validate bucket states vbucketmigrator<0.1575.0>: 378 ok INFO REPORT <0.1577.0> 2011-01-03 12:56:44 =============================================================================== vbucketmigrator<0.1577.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1577.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1577.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1577.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1577.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1577.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1577.0>: Starting to move bucket 379 vbucketmigrator<0.1577.0>: Bucket 379 moved to the next server vbucketmigrator<0.1577.0>: Validate bucket states vbucketmigrator<0.1577.0>: 379 ok INFO REPORT <0.1579.0> 2011-01-03 12:56:44 =============================================================================== vbucketmigrator<0.1579.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1579.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1579.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1579.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1579.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1579.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1579.0>: Starting to move bucket 380 vbucketmigrator<0.1579.0>: Bucket 380 moved to the next server vbucketmigrator<0.1579.0>: Validate bucket states vbucketmigrator<0.1579.0>: 380 ok INFO REPORT <0.1581.0> 2011-01-03 12:56:44 =============================================================================== vbucketmigrator<0.1581.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1581.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1581.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1581.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1581.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1581.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1581.0>: Starting to move bucket 381 vbucketmigrator<0.1581.0>: Bucket 381 moved to the next server vbucketmigrator<0.1581.0>: Validate bucket states vbucketmigrator<0.1581.0>: 381 ok INFO REPORT <0.1583.0> 2011-01-03 12:56:45 =============================================================================== 
vbucketmigrator<0.1583.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1583.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1583.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1583.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1583.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1583.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1583.0>: Starting to move bucket 382 vbucketmigrator<0.1583.0>: Bucket 382 moved to the next server vbucketmigrator<0.1583.0>: Validate bucket states vbucketmigrator<0.1583.0>: 382 ok INFO REPORT <0.1585.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1585.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1585.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1585.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1585.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1585.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1585.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1585.0>: Starting to move bucket 383 vbucketmigrator<0.1585.0>: Bucket 383 moved to the next server vbucketmigrator<0.1585.0>: Validate bucket states vbucketmigrator<0.1585.0>: 383 ok INFO REPORT <0.1587.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1587.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1587.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1587.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1587.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1587.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1587.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1587.0>: Starting to move bucket 384 vbucketmigrator<0.1587.0>: Bucket 384 moved to the next server vbucketmigrator<0.1587.0>: Validate bucket states vbucketmigrator<0.1587.0>: 384 ok INFO REPORT <0.1589.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1589.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1589.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1589.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1589.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1589.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1589.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1589.0>: Starting to move bucket 385 vbucketmigrator<0.1589.0>: Bucket 385 moved to the next server vbucketmigrator<0.1589.0>: Validate bucket states vbucketmigrator<0.1589.0>: 385 ok INFO REPORT <0.1591.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1591.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1591.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1591.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1591.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1591.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1591.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1591.0>: Starting to move bucket 386 vbucketmigrator<0.1591.0>: Bucket 386 moved to the next 
server vbucketmigrator<0.1591.0>: Validate bucket states vbucketmigrator<0.1591.0>: 386 ok INFO REPORT <0.1593.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1593.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1593.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1593.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1593.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1593.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1593.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1593.0>: Starting to move bucket 387 vbucketmigrator<0.1593.0>: Bucket 387 moved to the next server vbucketmigrator<0.1593.0>: Validate bucket states vbucketmigrator<0.1593.0>: 387 ok INFO REPORT <0.1595.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1595.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1595.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1595.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1595.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1595.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1595.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1595.0>: Starting to move bucket 388 vbucketmigrator<0.1595.0>: Bucket 388 moved to the next server vbucketmigrator<0.1595.0>: Validate bucket states vbucketmigrator<0.1595.0>: 388 ok INFO REPORT <0.1597.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1597.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1597.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1597.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1597.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1597.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1597.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1597.0>: Starting to move bucket 389 vbucketmigrator<0.1597.0>: Bucket 389 moved to the next server vbucketmigrator<0.1597.0>: Validate bucket states vbucketmigrator<0.1597.0>: 389 ok INFO REPORT <0.1599.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1599.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1599.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1599.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1599.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1599.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1599.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1599.0>: Starting to move bucket 390 vbucketmigrator<0.1599.0>: Bucket 390 moved to the next server vbucketmigrator<0.1599.0>: Validate bucket states vbucketmigrator<0.1599.0>: 390 ok INFO REPORT <0.1601.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1601.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1601.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1601.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1601.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1601.0>: Authenticating 
towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1601.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1601.0>: Starting to move bucket 391 vbucketmigrator<0.1601.0>: Bucket 391 moved to the next server vbucketmigrator<0.1601.0>: Validate bucket states vbucketmigrator<0.1601.0>: 391 ok INFO REPORT <0.1603.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1603.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1603.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1603.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1603.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1603.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1603.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1603.0>: Starting to move bucket 392 vbucketmigrator<0.1603.0>: Bucket 392 moved to the next server vbucketmigrator<0.1603.0>: Validate bucket states vbucketmigrator<0.1603.0>: 392 ok INFO REPORT <0.1605.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1605.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1605.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1605.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1605.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1605.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1605.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1605.0>: Starting to move bucket 393 vbucketmigrator<0.1605.0>: Bucket 393 moved to the next server vbucketmigrator<0.1605.0>: Validate bucket states vbucketmigrator<0.1605.0>: 393 ok INFO REPORT <0.1607.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1607.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1607.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1607.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1607.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1607.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1607.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1607.0>: Starting to move bucket 394 vbucketmigrator<0.1607.0>: Bucket 394 moved to the next server vbucketmigrator<0.1607.0>: Validate bucket states vbucketmigrator<0.1607.0>: 394 ok INFO REPORT <0.1609.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1609.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1609.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1609.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1609.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1609.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1609.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1609.0>: Starting to move bucket 395 vbucketmigrator<0.1609.0>: Bucket 395 moved to the next server vbucketmigrator<0.1609.0>: Validate bucket states vbucketmigrator<0.1609.0>: 395 ok INFO REPORT <0.1611.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1611.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1611.0>: Authenticating 
towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1611.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1611.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1611.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1611.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1611.0>: Starting to move bucket 396 vbucketmigrator<0.1611.0>: Bucket 396 moved to the next server vbucketmigrator<0.1611.0>: Validate bucket states vbucketmigrator<0.1611.0>: 396 ok INFO REPORT <0.1613.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1613.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1613.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1613.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1613.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1613.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1613.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1613.0>: Starting to move bucket 397 vbucketmigrator<0.1613.0>: Bucket 397 moved to the next server vbucketmigrator<0.1613.0>: Validate bucket states vbucketmigrator<0.1613.0>: 397 ok INFO REPORT <0.1615.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1615.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1615.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1615.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1615.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1615.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1615.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1615.0>: Starting to move bucket 398 vbucketmigrator<0.1615.0>: Bucket 398 moved to the next server vbucketmigrator<0.1615.0>: Validate bucket states vbucketmigrator<0.1615.0>: 398 ok INFO REPORT <0.1617.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1617.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1617.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1617.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1617.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1617.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1617.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1617.0>: Starting to move bucket 399 vbucketmigrator<0.1617.0>: Bucket 399 moved to the next server vbucketmigrator<0.1617.0>: Validate bucket states vbucketmigrator<0.1617.0>: 399 ok INFO REPORT <0.1619.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1619.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1619.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1619.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1619.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1619.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1619.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1619.0>: Starting to move bucket 400 vbucketmigrator<0.1619.0>: Bucket 400 moved to the next server vbucketmigrator<0.1619.0>: Validate bucket states vbucketmigrator<0.1619.0>: 400 ok INFO REPORT 
<0.1621.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1621.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1621.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1621.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1621.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1621.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1621.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1621.0>: Starting to move bucket 401 vbucketmigrator<0.1621.0>: Bucket 401 moved to the next server vbucketmigrator<0.1621.0>: Validate bucket states vbucketmigrator<0.1621.0>: 401 ok INFO REPORT <0.1623.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1623.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1623.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1623.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1623.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1623.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1623.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1623.0>: Starting to move bucket 402 vbucketmigrator<0.1623.0>: Bucket 402 moved to the next server vbucketmigrator<0.1623.0>: Validate bucket states vbucketmigrator<0.1623.0>: 402 ok INFO REPORT <0.1625.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1625.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1625.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1625.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1625.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1625.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1625.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1625.0>: Starting to move bucket 403 vbucketmigrator<0.1625.0>: Bucket 403 moved to the next server vbucketmigrator<0.1625.0>: Validate bucket states vbucketmigrator<0.1625.0>: 403 ok INFO REPORT <0.1627.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1627.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1627.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1627.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1627.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1627.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1627.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1627.0>: Starting to move bucket 404 vbucketmigrator<0.1627.0>: Bucket 404 moved to the next server vbucketmigrator<0.1627.0>: Validate bucket states vbucketmigrator<0.1627.0>: 404 ok INFO REPORT <0.1629.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1629.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1629.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1629.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1629.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1629.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1629.0>: Authenticated towards: {Sock 10.2.1.100:11210} 
vbucketmigrator<0.1629.0>: Starting to move bucket 405 vbucketmigrator<0.1629.0>: Bucket 405 moved to the next server vbucketmigrator<0.1629.0>: Validate bucket states vbucketmigrator<0.1629.0>: 405 ok INFO REPORT <0.1633.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1633.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1633.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1633.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1633.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1633.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1633.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1633.0>: Starting to move bucket 406 vbucketmigrator<0.1633.0>: Bucket 406 moved to the next server vbucketmigrator<0.1633.0>: Validate bucket states vbucketmigrator<0.1633.0>: 406 ok INFO REPORT <0.1635.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1635.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1635.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1635.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1635.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1635.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1635.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1635.0>: Starting to move bucket 407 vbucketmigrator<0.1635.0>: Bucket 407 moved to the next server vbucketmigrator<0.1635.0>: Validate bucket states vbucketmigrator<0.1635.0>: 407 ok INFO REPORT <0.1637.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1637.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1637.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1637.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1637.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1637.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1637.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1637.0>: Starting to move bucket 408 vbucketmigrator<0.1637.0>: Bucket 408 moved to the next server vbucketmigrator<0.1637.0>: Validate bucket states vbucketmigrator<0.1637.0>: 408 ok INFO REPORT <0.1639.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1639.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1639.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1639.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1639.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1639.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1639.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1639.0>: Starting to move bucket 409 vbucketmigrator<0.1639.0>: Bucket 409 moved to the next server vbucketmigrator<0.1639.0>: Validate bucket states vbucketmigrator<0.1639.0>: 409 ok INFO REPORT <0.1642.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1642.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1642.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1642.0>: Authenticated towards: {Sock 10.2.1.101:11210} 
vbucketmigrator<0.1642.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1642.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1642.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1642.0>: Starting to move bucket 410 vbucketmigrator<0.1642.0>: Bucket 410 moved to the next server vbucketmigrator<0.1642.0>: Validate bucket states vbucketmigrator<0.1642.0>: 410 ok INFO REPORT <0.1644.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1644.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1644.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1644.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1644.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1644.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1644.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1644.0>: Starting to move bucket 411 vbucketmigrator<0.1644.0>: Bucket 411 moved to the next server vbucketmigrator<0.1644.0>: Validate bucket states vbucketmigrator<0.1644.0>: 411 ok INFO REPORT <0.1646.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1646.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1646.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1646.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1646.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1646.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1646.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1646.0>: Starting to move bucket 412 vbucketmigrator<0.1646.0>: Bucket 412 moved to the next server vbucketmigrator<0.1646.0>: Validate bucket states vbucketmigrator<0.1646.0>: 412 ok INFO REPORT <0.1648.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1648.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1648.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1648.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1648.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1648.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1648.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1648.0>: Starting to move bucket 413 vbucketmigrator<0.1648.0>: Bucket 413 moved to the next server vbucketmigrator<0.1648.0>: Validate bucket states vbucketmigrator<0.1648.0>: 413 ok INFO REPORT <0.1650.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1650.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1650.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1650.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1650.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1650.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1650.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1650.0>: Starting to move bucket 414 vbucketmigrator<0.1650.0>: Bucket 414 moved to the next server vbucketmigrator<0.1650.0>: Validate bucket states vbucketmigrator<0.1650.0>: 414 ok INFO REPORT <0.1652.0> 2011-01-03 12:56:45 =============================================================================== 
vbucketmigrator<0.1652.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1652.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1652.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1652.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1652.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1652.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1652.0>: Starting to move bucket 415 vbucketmigrator<0.1652.0>: Bucket 415 moved to the next server vbucketmigrator<0.1652.0>: Validate bucket states vbucketmigrator<0.1652.0>: 415 ok INFO REPORT <0.1654.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1654.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1654.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1654.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1654.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1654.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1654.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1654.0>: Starting to move bucket 416 vbucketmigrator<0.1654.0>: Bucket 416 moved to the next server vbucketmigrator<0.1654.0>: Validate bucket states vbucketmigrator<0.1654.0>: 416 ok INFO REPORT <0.1656.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1656.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1656.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1656.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1656.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1656.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1656.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1656.0>: Starting to move bucket 417 vbucketmigrator<0.1656.0>: Bucket 417 moved to the next server vbucketmigrator<0.1656.0>: Validate bucket states vbucketmigrator<0.1656.0>: 417 ok INFO REPORT <0.1658.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1658.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1658.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1658.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1658.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1658.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1658.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1658.0>: Starting to move bucket 418 vbucketmigrator<0.1658.0>: Bucket 418 moved to the next server vbucketmigrator<0.1658.0>: Validate bucket states vbucketmigrator<0.1658.0>: 418 ok INFO REPORT <0.1660.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1660.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1660.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1660.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1660.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1660.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1660.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1660.0>: Starting to move bucket 419 vbucketmigrator<0.1660.0>: Bucket 419 moved to the next 
server vbucketmigrator<0.1660.0>: Validate bucket states vbucketmigrator<0.1660.0>: 419 ok ERROR REPORT <0.1630.0> 2011-01-03 12:56:45 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.1662.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1662.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1662.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1662.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1662.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1662.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1662.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1662.0>: Starting to move bucket 420 vbucketmigrator<0.1662.0>: Bucket 420 moved to the next server vbucketmigrator<0.1662.0>: Validate bucket states vbucketmigrator<0.1662.0>: 420 ok INFO REPORT <0.1664.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1664.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1664.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1664.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1664.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1664.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1664.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1664.0>: Starting to move bucket 421 vbucketmigrator<0.1664.0>: Bucket 421 moved to the next server vbucketmigrator<0.1664.0>: Validate bucket states vbucketmigrator<0.1664.0>: 421 ok INFO REPORT <0.1666.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1666.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1666.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1666.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1666.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1666.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1666.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1666.0>: Starting to move bucket 422 vbucketmigrator<0.1666.0>: Bucket 422 moved to the next server vbucketmigrator<0.1666.0>: Validate bucket states vbucketmigrator<0.1666.0>: 422 ok INFO REPORT <0.1668.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1668.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1668.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1668.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1668.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1668.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1668.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1668.0>: Starting to move bucket 423 vbucketmigrator<0.1668.0>: Bucket 423 moved to the next server vbucketmigrator<0.1668.0>: Validate bucket states vbucketmigrator<0.1668.0>: 423 ok INFO REPORT <0.1670.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1670.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1670.0>: Authenticating towards: {Sock 10.2.1.101:11210} 
vbucketmigrator<0.1670.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1670.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1670.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1670.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1670.0>: Starting to move bucket 424 vbucketmigrator<0.1670.0>: Bucket 424 moved to the next server vbucketmigrator<0.1670.0>: Validate bucket states vbucketmigrator<0.1670.0>: 424 ok INFO REPORT <0.1672.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1672.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1672.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1672.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1672.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1672.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1672.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1672.0>: Starting to move bucket 425 vbucketmigrator<0.1672.0>: Bucket 425 moved to the next server vbucketmigrator<0.1672.0>: Validate bucket states vbucketmigrator<0.1672.0>: 425 ok INFO REPORT <0.1674.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1674.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1674.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1674.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1674.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1674.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1674.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1674.0>: Starting to move bucket 426 vbucketmigrator<0.1674.0>: Bucket 426 moved to the next server vbucketmigrator<0.1674.0>: Validate bucket states vbucketmigrator<0.1674.0>: 426 ok INFO REPORT <0.1676.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1676.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1676.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1676.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1676.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1676.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1676.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1676.0>: Starting to move bucket 427 vbucketmigrator<0.1676.0>: Bucket 427 moved to the next server vbucketmigrator<0.1676.0>: Validate bucket states vbucketmigrator<0.1676.0>: 427 ok INFO REPORT <0.1678.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1678.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1678.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1678.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1678.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1678.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1678.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1678.0>: Starting to move bucket 428 vbucketmigrator<0.1678.0>: Bucket 428 moved to the next server vbucketmigrator<0.1678.0>: Validate bucket states vbucketmigrator<0.1678.0>: 428 ok INFO REPORT <0.1680.0> 2011-01-03 12:56:45 
=============================================================================== vbucketmigrator<0.1680.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1680.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1680.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1680.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1680.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1680.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1680.0>: Starting to move bucket 429 vbucketmigrator<0.1680.0>: Bucket 429 moved to the next server vbucketmigrator<0.1680.0>: Validate bucket states vbucketmigrator<0.1680.0>: 429 ok INFO REPORT <0.1682.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1682.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1682.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1682.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1682.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1682.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1682.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1682.0>: Starting to move bucket 430 vbucketmigrator<0.1682.0>: Bucket 430 moved to the next server vbucketmigrator<0.1682.0>: Validate bucket states vbucketmigrator<0.1682.0>: 430 ok INFO REPORT <0.1684.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1684.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1684.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1684.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1684.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1684.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1684.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1684.0>: Starting to move bucket 431 vbucketmigrator<0.1684.0>: Bucket 431 moved to the next server vbucketmigrator<0.1684.0>: Validate bucket states vbucketmigrator<0.1684.0>: 431 ok INFO REPORT <0.1686.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1686.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1686.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1686.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1686.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1686.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1686.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1686.0>: Starting to move bucket 432 vbucketmigrator<0.1686.0>: Bucket 432 moved to the next server vbucketmigrator<0.1686.0>: Validate bucket states vbucketmigrator<0.1686.0>: 432 ok INFO REPORT <0.1688.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1688.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1688.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1688.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1688.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1688.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1688.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1688.0>: 
Starting to move bucket 433 vbucketmigrator<0.1688.0>: Bucket 433 moved to the next server vbucketmigrator<0.1688.0>: Validate bucket states vbucketmigrator<0.1688.0>: 433 ok INFO REPORT <0.1690.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1690.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1690.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1690.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1690.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1690.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1690.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1690.0>: Starting to move bucket 434 vbucketmigrator<0.1690.0>: Bucket 434 moved to the next server vbucketmigrator<0.1690.0>: Validate bucket states vbucketmigrator<0.1690.0>: 434 ok INFO REPORT <0.1692.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1692.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1692.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1692.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1692.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1692.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1692.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1692.0>: Starting to move bucket 435 vbucketmigrator<0.1692.0>: Bucket 435 moved to the next server vbucketmigrator<0.1692.0>: Validate bucket states vbucketmigrator<0.1692.0>: 435 ok INFO REPORT <0.1694.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1694.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1694.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1694.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1694.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1694.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1694.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1694.0>: Starting to move bucket 436 vbucketmigrator<0.1694.0>: Bucket 436 moved to the next server vbucketmigrator<0.1694.0>: Validate bucket states vbucketmigrator<0.1694.0>: 436 ok INFO REPORT <0.1696.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1696.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1696.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1696.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1696.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1696.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1696.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1696.0>: Starting to move bucket 437 vbucketmigrator<0.1696.0>: Bucket 437 moved to the next server vbucketmigrator<0.1696.0>: Validate bucket states vbucketmigrator<0.1696.0>: 437 ok INFO REPORT <0.1698.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1698.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1698.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1698.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1698.0>: 
Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1698.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1698.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1698.0>: Starting to move bucket 438 vbucketmigrator<0.1698.0>: Bucket 438 moved to the next server vbucketmigrator<0.1698.0>: Validate bucket states vbucketmigrator<0.1698.0>: 438 ok INFO REPORT <0.1700.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1700.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1700.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1700.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1700.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1700.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1700.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1700.0>: Starting to move bucket 439 vbucketmigrator<0.1700.0>: Bucket 439 moved to the next server vbucketmigrator<0.1700.0>: Validate bucket states vbucketmigrator<0.1700.0>: 439 ok INFO REPORT <0.1702.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1702.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1702.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1702.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1702.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1702.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1702.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1702.0>: Starting to move bucket 440 vbucketmigrator<0.1702.0>: Bucket 440 moved to the next server vbucketmigrator<0.1702.0>: Validate bucket states vbucketmigrator<0.1702.0>: 440 ok INFO REPORT <0.1704.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1704.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1704.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1704.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1704.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1704.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1704.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1704.0>: Starting to move bucket 441 vbucketmigrator<0.1704.0>: Bucket 441 moved to the next server vbucketmigrator<0.1704.0>: Validate bucket states vbucketmigrator<0.1704.0>: 441 ok INFO REPORT <0.1706.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1706.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1706.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1706.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1706.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1706.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1706.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1706.0>: Starting to move bucket 442 vbucketmigrator<0.1706.0>: Bucket 442 moved to the next server vbucketmigrator<0.1706.0>: Validate bucket states vbucketmigrator<0.1706.0>: 442 ok INFO REPORT <0.1708.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1708.0>: 
Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1708.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1708.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1708.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1708.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1708.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1708.0>: Starting to move bucket 443 vbucketmigrator<0.1708.0>: Bucket 443 moved to the next server vbucketmigrator<0.1708.0>: Validate bucket states vbucketmigrator<0.1708.0>: 443 ok INFO REPORT <0.1710.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1710.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1710.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1710.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1710.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1710.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1710.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1710.0>: Starting to move bucket 444 vbucketmigrator<0.1710.0>: Bucket 444 moved to the next server vbucketmigrator<0.1710.0>: Validate bucket states vbucketmigrator<0.1710.0>: 444 ok INFO REPORT <0.1712.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1712.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1712.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1712.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1712.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1712.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1712.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1712.0>: Starting to move bucket 445 vbucketmigrator<0.1712.0>: Bucket 445 moved to the next server vbucketmigrator<0.1712.0>: Validate bucket states vbucketmigrator<0.1712.0>: 445 ok INFO REPORT <0.1714.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1714.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1714.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1714.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1714.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1714.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1714.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1714.0>: Starting to move bucket 446 vbucketmigrator<0.1714.0>: Bucket 446 moved to the next server vbucketmigrator<0.1714.0>: Validate bucket states vbucketmigrator<0.1714.0>: 446 ok INFO REPORT <0.1716.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1716.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1716.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1716.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1716.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1716.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1716.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1716.0>: Starting to move bucket 447 vbucketmigrator<0.1716.0>: Bucket 447 moved to the next server 
vbucketmigrator<0.1716.0>: Validate bucket states vbucketmigrator<0.1716.0>: 447 ok INFO REPORT <0.1718.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1718.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1718.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1718.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1718.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1718.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1718.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1718.0>: Starting to move bucket 448 vbucketmigrator<0.1718.0>: Bucket 448 moved to the next server vbucketmigrator<0.1718.0>: Validate bucket states vbucketmigrator<0.1718.0>: 448 ok INFO REPORT <0.1720.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1720.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1720.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1720.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1720.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1720.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1720.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1720.0>: Starting to move bucket 449 vbucketmigrator<0.1720.0>: Bucket 449 moved to the next server vbucketmigrator<0.1720.0>: Validate bucket states vbucketmigrator<0.1720.0>: 449 ok INFO REPORT <0.1722.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1722.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1722.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1722.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1722.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1722.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1722.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1722.0>: Starting to move bucket 450 vbucketmigrator<0.1722.0>: Bucket 450 moved to the next server vbucketmigrator<0.1722.0>: Validate bucket states vbucketmigrator<0.1722.0>: 450 ok INFO REPORT <0.1724.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1724.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1724.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1724.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1724.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1724.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1724.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1724.0>: Starting to move bucket 451 vbucketmigrator<0.1724.0>: Bucket 451 moved to the next server vbucketmigrator<0.1724.0>: Validate bucket states vbucketmigrator<0.1724.0>: 451 ok INFO REPORT <0.1726.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1726.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1726.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1726.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1726.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1726.0>: Authenticating towards: {Sock 
10.2.1.100:11210} vbucketmigrator<0.1726.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1726.0>: Starting to move bucket 452 vbucketmigrator<0.1726.0>: Bucket 452 moved to the next server vbucketmigrator<0.1726.0>: Validate bucket states vbucketmigrator<0.1726.0>: 452 ok INFO REPORT <0.1728.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1728.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1728.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1728.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1728.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1728.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1728.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1728.0>: Starting to move bucket 453 vbucketmigrator<0.1728.0>: Bucket 453 moved to the next server vbucketmigrator<0.1728.0>: Validate bucket states vbucketmigrator<0.1728.0>: 453 ok INFO REPORT <0.1730.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1730.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1730.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1730.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1730.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1730.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1730.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1730.0>: Starting to move bucket 454 vbucketmigrator<0.1730.0>: Bucket 454 moved to the next server vbucketmigrator<0.1730.0>: Validate bucket states vbucketmigrator<0.1730.0>: 454 ok INFO REPORT <0.1732.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1732.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1732.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1732.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1732.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1732.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1732.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1732.0>: Starting to move bucket 455 vbucketmigrator<0.1732.0>: Bucket 455 moved to the next server vbucketmigrator<0.1732.0>: Validate bucket states vbucketmigrator<0.1732.0>: 455 ok INFO REPORT <0.1734.0> 2011-01-03 12:56:45 =============================================================================== vbucketmigrator<0.1734.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1734.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1734.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1734.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1734.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1734.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1734.0>: Starting to move bucket 456 vbucketmigrator<0.1734.0>: Bucket 456 moved to the next server vbucketmigrator<0.1734.0>: Validate bucket states vbucketmigrator<0.1734.0>: 456 ok INFO REPORT <0.1736.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1736.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1736.0>: Authenticating towards: {Sock 
10.2.1.101:11210} vbucketmigrator<0.1736.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1736.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1736.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1736.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1736.0>: Starting to move bucket 457 vbucketmigrator<0.1736.0>: Bucket 457 moved to the next server vbucketmigrator<0.1736.0>: Validate bucket states vbucketmigrator<0.1736.0>: 457 ok INFO REPORT <0.1738.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1738.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1738.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1738.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1738.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1738.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1738.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1738.0>: Starting to move bucket 458 vbucketmigrator<0.1738.0>: Bucket 458 moved to the next server vbucketmigrator<0.1738.0>: Validate bucket states vbucketmigrator<0.1738.0>: 458 ok INFO REPORT <0.1740.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1740.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1740.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1740.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1740.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1740.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1740.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1740.0>: Starting to move bucket 459 vbucketmigrator<0.1740.0>: Bucket 459 moved to the next server vbucketmigrator<0.1740.0>: Validate bucket states vbucketmigrator<0.1740.0>: 459 ok INFO REPORT <0.1742.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1742.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1742.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1742.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1742.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1742.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1742.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1742.0>: Starting to move bucket 460 vbucketmigrator<0.1742.0>: Bucket 460 moved to the next server vbucketmigrator<0.1742.0>: Validate bucket states vbucketmigrator<0.1742.0>: 460 ok INFO REPORT <0.1744.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1744.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1744.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1744.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1744.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1744.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1744.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1744.0>: Starting to move bucket 461 vbucketmigrator<0.1744.0>: Bucket 461 moved to the next server vbucketmigrator<0.1744.0>: Validate bucket states vbucketmigrator<0.1744.0>: 461 ok INFO REPORT <0.1746.0> 2011-01-03 
12:56:46 =============================================================================== vbucketmigrator<0.1746.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1746.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1746.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1746.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1746.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1746.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1746.0>: Starting to move bucket 462 vbucketmigrator<0.1746.0>: Bucket 462 moved to the next server vbucketmigrator<0.1746.0>: Validate bucket states vbucketmigrator<0.1746.0>: 462 ok INFO REPORT <0.1748.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1748.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1748.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1748.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1748.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1748.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1748.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1748.0>: Starting to move bucket 463 vbucketmigrator<0.1748.0>: Bucket 463 moved to the next server vbucketmigrator<0.1748.0>: Validate bucket states vbucketmigrator<0.1748.0>: 463 ok INFO REPORT <0.1750.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1750.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1750.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1750.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1750.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1750.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1750.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1750.0>: Starting to move bucket 464 vbucketmigrator<0.1750.0>: Bucket 464 moved to the next server vbucketmigrator<0.1750.0>: Validate bucket states vbucketmigrator<0.1750.0>: 464 ok INFO REPORT <0.1752.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1752.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1752.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1752.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1752.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1752.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1752.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1752.0>: Starting to move bucket 465 vbucketmigrator<0.1752.0>: Bucket 465 moved to the next server vbucketmigrator<0.1752.0>: Validate bucket states vbucketmigrator<0.1752.0>: 465 ok INFO REPORT <0.1754.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1754.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1754.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1754.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1754.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1754.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1754.0>: Authenticated towards: {Sock 10.2.1.100:11210} 
vbucketmigrator<0.1754.0>: Starting to move bucket 466 vbucketmigrator<0.1754.0>: Bucket 466 moved to the next server vbucketmigrator<0.1754.0>: Validate bucket states vbucketmigrator<0.1754.0>: 466 ok INFO REPORT <0.1756.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1756.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1756.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1756.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1756.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1756.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1756.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1756.0>: Starting to move bucket 467 vbucketmigrator<0.1756.0>: Bucket 467 moved to the next server vbucketmigrator<0.1756.0>: Validate bucket states vbucketmigrator<0.1756.0>: 467 ok INFO REPORT <0.1758.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1758.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1758.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1758.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1758.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1758.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1758.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1758.0>: Starting to move bucket 468 vbucketmigrator<0.1758.0>: Bucket 468 moved to the next server vbucketmigrator<0.1758.0>: Validate bucket states vbucketmigrator<0.1758.0>: 468 ok INFO REPORT <0.1760.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1760.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1760.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1760.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1760.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1760.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1760.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1760.0>: Starting to move bucket 469 vbucketmigrator<0.1760.0>: Bucket 469 moved to the next server vbucketmigrator<0.1760.0>: Validate bucket states vbucketmigrator<0.1760.0>: 469 ok INFO REPORT <0.1762.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1762.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1762.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1762.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1762.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1762.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1762.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1762.0>: Starting to move bucket 470 vbucketmigrator<0.1762.0>: Bucket 470 moved to the next server vbucketmigrator<0.1762.0>: Validate bucket states vbucketmigrator<0.1762.0>: 470 ok INFO REPORT <0.1764.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1764.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1764.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1764.0>: Authenticated towards: {Sock 10.2.1.101:11210} 
vbucketmigrator<0.1764.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1764.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1764.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1764.0>: Starting to move bucket 471 vbucketmigrator<0.1764.0>: Bucket 471 moved to the next server vbucketmigrator<0.1764.0>: Validate bucket states vbucketmigrator<0.1764.0>: 471 ok INFO REPORT <0.1766.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1766.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1766.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1766.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1766.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1766.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1766.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1766.0>: Starting to move bucket 472 vbucketmigrator<0.1766.0>: Bucket 472 moved to the next server vbucketmigrator<0.1766.0>: Validate bucket states vbucketmigrator<0.1766.0>: 472 ok INFO REPORT <0.1768.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1768.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1768.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1768.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1768.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1768.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1768.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1768.0>: Starting to move bucket 473 vbucketmigrator<0.1768.0>: Bucket 473 moved to the next server vbucketmigrator<0.1768.0>: Validate bucket states vbucketmigrator<0.1768.0>: 473 ok INFO REPORT <0.1770.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1770.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1770.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1770.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1770.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1770.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1770.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1770.0>: Starting to move bucket 474 vbucketmigrator<0.1770.0>: Bucket 474 moved to the next server vbucketmigrator<0.1770.0>: Validate bucket states vbucketmigrator<0.1770.0>: 474 ok INFO REPORT <0.1772.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1772.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1772.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1772.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1772.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1772.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1772.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1772.0>: Starting to move bucket 475 vbucketmigrator<0.1772.0>: Bucket 475 moved to the next server vbucketmigrator<0.1772.0>: Validate bucket states vbucketmigrator<0.1772.0>: 475 ok INFO REPORT <0.1774.0> 2011-01-03 12:56:46 =============================================================================== 
vbucketmigrator<0.1774.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1774.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1774.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1774.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1774.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1774.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1774.0>: Starting to move bucket 476 vbucketmigrator<0.1774.0>: Bucket 476 moved to the next server vbucketmigrator<0.1774.0>: Validate bucket states vbucketmigrator<0.1774.0>: 476 ok INFO REPORT <0.1776.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1776.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1776.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1776.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1776.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1776.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1776.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1776.0>: Starting to move bucket 477 vbucketmigrator<0.1776.0>: Bucket 477 moved to the next server vbucketmigrator<0.1776.0>: Validate bucket states vbucketmigrator<0.1776.0>: 477 ok INFO REPORT <0.1778.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1778.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1778.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1778.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1778.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1778.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1778.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1778.0>: Starting to move bucket 478 vbucketmigrator<0.1778.0>: Bucket 478 moved to the next server vbucketmigrator<0.1778.0>: Validate bucket states vbucketmigrator<0.1778.0>: 478 ok INFO REPORT <0.1780.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1780.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1780.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1780.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1780.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1780.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1780.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1780.0>: Starting to move bucket 479 vbucketmigrator<0.1780.0>: Bucket 479 moved to the next server vbucketmigrator<0.1780.0>: Validate bucket states vbucketmigrator<0.1780.0>: 479 ok INFO REPORT <0.1782.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1782.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1782.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1782.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1782.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1782.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1782.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1782.0>: Starting to move bucket 480 vbucketmigrator<0.1782.0>: Bucket 480 moved to the next 
server vbucketmigrator<0.1782.0>: Validate bucket states vbucketmigrator<0.1782.0>: 480 ok INFO REPORT <0.1784.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1784.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1784.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1784.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1784.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1784.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1784.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1784.0>: Starting to move bucket 481 vbucketmigrator<0.1784.0>: Bucket 481 moved to the next server vbucketmigrator<0.1784.0>: Validate bucket states vbucketmigrator<0.1784.0>: 481 ok INFO REPORT <0.1786.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1786.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1786.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1786.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1786.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1786.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1786.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1786.0>: Starting to move bucket 482 vbucketmigrator<0.1786.0>: Bucket 482 moved to the next server vbucketmigrator<0.1786.0>: Validate bucket states vbucketmigrator<0.1786.0>: 482 ok INFO REPORT <0.1788.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1788.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1788.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1788.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1788.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1788.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1788.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1788.0>: Starting to move bucket 483 vbucketmigrator<0.1788.0>: Bucket 483 moved to the next server vbucketmigrator<0.1788.0>: Validate bucket states vbucketmigrator<0.1788.0>: 483 ok INFO REPORT <0.1790.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1790.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1790.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1790.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1790.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1790.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1790.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1790.0>: Starting to move bucket 484 vbucketmigrator<0.1790.0>: Bucket 484 moved to the next server vbucketmigrator<0.1790.0>: Validate bucket states vbucketmigrator<0.1790.0>: 484 ok INFO REPORT <0.1792.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1792.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1792.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1792.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1792.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1792.0>: Authenticating 
towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1792.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1792.0>: Starting to move bucket 485 vbucketmigrator<0.1792.0>: Bucket 485 moved to the next server vbucketmigrator<0.1792.0>: Validate bucket states vbucketmigrator<0.1792.0>: 485 ok INFO REPORT <0.1794.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1794.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1794.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1794.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1794.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1794.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1794.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1794.0>: Starting to move bucket 486 vbucketmigrator<0.1794.0>: Bucket 486 moved to the next server vbucketmigrator<0.1794.0>: Validate bucket states vbucketmigrator<0.1794.0>: 486 ok INFO REPORT <0.1796.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1796.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1796.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1796.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1796.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1796.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1796.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1796.0>: Starting to move bucket 487 vbucketmigrator<0.1796.0>: Bucket 487 moved to the next server vbucketmigrator<0.1796.0>: Validate bucket states vbucketmigrator<0.1796.0>: 487 ok INFO REPORT <0.1798.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1798.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1798.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1798.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1798.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1798.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1798.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1798.0>: Starting to move bucket 488 vbucketmigrator<0.1798.0>: Bucket 488 moved to the next server vbucketmigrator<0.1798.0>: Validate bucket states vbucketmigrator<0.1798.0>: 488 ok INFO REPORT <0.1800.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1800.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1800.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1800.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1800.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1800.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1800.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1800.0>: Starting to move bucket 489 vbucketmigrator<0.1800.0>: Bucket 489 moved to the next server vbucketmigrator<0.1800.0>: Validate bucket states vbucketmigrator<0.1800.0>: 489 ok INFO REPORT <0.1802.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1802.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1802.0>: Authenticating 
towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1802.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1802.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1802.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1802.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1802.0>: Starting to move bucket 490 vbucketmigrator<0.1802.0>: Bucket 490 moved to the next server vbucketmigrator<0.1802.0>: Validate bucket states vbucketmigrator<0.1802.0>: 490 ok INFO REPORT <0.1804.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1804.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1804.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1804.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1804.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1804.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1804.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1804.0>: Starting to move bucket 491 vbucketmigrator<0.1804.0>: Bucket 491 moved to the next server vbucketmigrator<0.1804.0>: Validate bucket states vbucketmigrator<0.1804.0>: 491 ok INFO REPORT <0.1806.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1806.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1806.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1806.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1806.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1806.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1806.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1806.0>: Starting to move bucket 492 vbucketmigrator<0.1806.0>: Bucket 492 moved to the next server vbucketmigrator<0.1806.0>: Validate bucket states vbucketmigrator<0.1806.0>: 492 ok INFO REPORT <0.1808.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1808.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1808.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1808.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1808.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1808.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1808.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1808.0>: Starting to move bucket 493 vbucketmigrator<0.1808.0>: Bucket 493 moved to the next server vbucketmigrator<0.1808.0>: Validate bucket states vbucketmigrator<0.1808.0>: 493 ok INFO REPORT <0.1810.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1810.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1810.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1810.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1810.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1810.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1810.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1810.0>: Starting to move bucket 494 vbucketmigrator<0.1810.0>: Bucket 494 moved to the next server vbucketmigrator<0.1810.0>: Validate bucket states vbucketmigrator<0.1810.0>: 494 ok INFO REPORT 
<0.1812.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1812.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1812.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1812.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1812.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1812.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1812.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1812.0>: Starting to move bucket 495 vbucketmigrator<0.1812.0>: Bucket 495 moved to the next server vbucketmigrator<0.1812.0>: Validate bucket states vbucketmigrator<0.1812.0>: 495 ok INFO REPORT <0.1814.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1814.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1814.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1814.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1814.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1814.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1814.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1814.0>: Starting to move bucket 496 vbucketmigrator<0.1814.0>: Bucket 496 moved to the next server vbucketmigrator<0.1814.0>: Validate bucket states vbucketmigrator<0.1814.0>: 496 ok INFO REPORT <0.1816.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1816.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1816.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1816.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1816.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1816.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1816.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1816.0>: Starting to move bucket 497 vbucketmigrator<0.1816.0>: Bucket 497 moved to the next server vbucketmigrator<0.1816.0>: Validate bucket states vbucketmigrator<0.1816.0>: 497 ok INFO REPORT <0.1818.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1818.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1818.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1818.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1818.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1818.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1818.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1818.0>: Starting to move bucket 498 vbucketmigrator<0.1818.0>: Bucket 498 moved to the next server vbucketmigrator<0.1818.0>: Validate bucket states vbucketmigrator<0.1818.0>: 498 ok INFO REPORT <0.1823.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1823.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1823.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1823.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1823.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1823.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1823.0>: Authenticated towards: {Sock 10.2.1.100:11210} 
vbucketmigrator<0.1823.0>: Starting to move bucket 499 vbucketmigrator<0.1823.0>: Bucket 499 moved to the next server vbucketmigrator<0.1823.0>: Validate bucket states vbucketmigrator<0.1823.0>: 499 ok INFO REPORT <0.1825.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1825.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1825.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1825.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1825.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1825.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1825.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1825.0>: Starting to move bucket 500 vbucketmigrator<0.1825.0>: Bucket 500 moved to the next server vbucketmigrator<0.1825.0>: Validate bucket states vbucketmigrator<0.1825.0>: 500 ok INFO REPORT <0.1827.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1827.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1827.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1827.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1827.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1827.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1827.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1827.0>: Starting to move bucket 501 vbucketmigrator<0.1827.0>: Bucket 501 moved to the next server vbucketmigrator<0.1827.0>: Validate bucket states vbucketmigrator<0.1827.0>: 501 ok INFO REPORT <0.1829.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1829.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1829.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1829.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1829.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1829.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1829.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1829.0>: Starting to move bucket 502 vbucketmigrator<0.1829.0>: Bucket 502 moved to the next server vbucketmigrator<0.1829.0>: Validate bucket states vbucketmigrator<0.1829.0>: 502 ok INFO REPORT <0.1831.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1831.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1831.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1831.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1831.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1831.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1831.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1831.0>: Starting to move bucket 503 vbucketmigrator<0.1831.0>: Bucket 503 moved to the next server vbucketmigrator<0.1831.0>: Validate bucket states vbucketmigrator<0.1831.0>: 503 ok INFO REPORT <0.1833.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1833.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1833.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1833.0>: Authenticated towards: {Sock 10.2.1.101:11210} 
vbucketmigrator<0.1833.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1833.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1833.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1833.0>: Starting to move bucket 504 vbucketmigrator<0.1833.0>: Bucket 504 moved to the next server vbucketmigrator<0.1833.0>: Validate bucket states vbucketmigrator<0.1833.0>: 504 ok INFO REPORT <0.1835.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1835.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1835.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1835.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1835.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1835.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1835.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1835.0>: Starting to move bucket 505 vbucketmigrator<0.1835.0>: Bucket 505 moved to the next server vbucketmigrator<0.1835.0>: Validate bucket states vbucketmigrator<0.1835.0>: 505 ok INFO REPORT <0.1837.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1837.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1837.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1837.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1837.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1837.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1837.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1837.0>: Starting to move bucket 506 vbucketmigrator<0.1837.0>: Bucket 506 moved to the next server vbucketmigrator<0.1837.0>: Validate bucket states vbucketmigrator<0.1837.0>: 506 ok INFO REPORT <0.1839.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1839.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1839.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1839.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1839.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1839.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1839.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1839.0>: Starting to move bucket 507 vbucketmigrator<0.1839.0>: Bucket 507 moved to the next server vbucketmigrator<0.1839.0>: Validate bucket states vbucketmigrator<0.1839.0>: 507 ok INFO REPORT <0.1841.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1841.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1841.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1841.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1841.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1841.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1841.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1841.0>: Starting to move bucket 508 vbucketmigrator<0.1841.0>: Bucket 508 moved to the next server vbucketmigrator<0.1841.0>: Validate bucket states vbucketmigrator<0.1841.0>: 508 ok INFO REPORT <0.1843.0> 2011-01-03 12:56:46 =============================================================================== 
vbucketmigrator<0.1843.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1843.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1843.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1843.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1843.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1843.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1843.0>: Starting to move bucket 509 vbucketmigrator<0.1843.0>: Bucket 509 moved to the next server vbucketmigrator<0.1843.0>: Validate bucket states vbucketmigrator<0.1843.0>: 509 ok INFO REPORT <0.1845.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1845.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1845.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1845.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1845.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1845.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1845.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1845.0>: Starting to move bucket 510 vbucketmigrator<0.1845.0>: Bucket 510 moved to the next server vbucketmigrator<0.1845.0>: Validate bucket states vbucketmigrator<0.1845.0>: 510 ok INFO REPORT <0.1847.0> 2011-01-03 12:56:46 =============================================================================== vbucketmigrator<0.1847.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.1847.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1847.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.1847.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.1847.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1847.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.1847.0>: Starting to move bucket 511 vbucketmigrator<0.1847.0>: Bucket 511 moved to the next server vbucketmigrator<0.1847.0>: Validate bucket states vbucketmigrator<0.1847.0>: 511 ok INFO REPORT <0.65.0> 2011-01-03 12:56:46 =============================================================================== config change: buckets -> [{configs,[{"default", [{num_replicas,1}, {ram_quota,3426746368}, {auth_type,sasl}, {sasl_password,[]}, {type,membase}, {num_vbuckets,1024}, {ht_size,3079}, {tap_keepalive,0}, {tap_noop_interval,20}, {max_txn_size,1000}, {ht_locks,5}, {servers,['ns_1@10.2.1.100','ns_1@10.2.1.101']}, {map,[['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], 
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101'|...], [...]|...]}]}]}] INFO REPORT <0.65.0> 2011-01-03 12:56:46 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:56:46 =============================================================================== Pushing config INFO REPORT <0.110.0> 2011-01-03 12:56:46 =============================================================================== ns_log: logging ns_orchestrator:1:Rebalance completed successfully. 
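The entries above repeat one per-vbucket migration handshake for vbuckets 447 through 511: vbucketmigrator connects and authenticates to the source node (10.2.1.101:11210), then to the destination node (10.2.1.100:11210), moves the vbucket, and validates its state ("<N> ok"). When the last vbucket (511) validates, the orchestrator writes the new bucket map into the config and logs "Rebalance completed successfully." A minimal sketch for tallying that progress from a log like this one, assuming only the message texts visible above (the script and its names are illustrative, not part of ns_server):

    import re
    import sys

    # Message patterns exactly as they appear in the vbucketmigrator output above.
    START = re.compile(r"Starting to move bucket (\d+)")
    MOVED = re.compile(r"Bucket (\d+) moved to the next server")
    OK = re.compile(r"vbucketmigrator<[^>]+>: (\d+) ok")

    def tally(text):
        """Return {vbucket: last stage reached}, where stages advance
        started -> moved -> ok. Scanning stage by stage lets later
        stages overwrite earlier ones, so run-on or interleaved log
        text is handled the same as line-broken text."""
        state = {}
        for pattern, stage in ((START, "started"), (MOVED, "moved"), (OK, "ok")):
            for m in pattern.finditer(text):
                state[int(m.group(1))] = stage
        return state

    if __name__ == "__main__":
        state = tally(sys.stdin.read())
        stuck = sorted(v for v, s in state.items() if s != "ok")
        print("%d vbuckets seen, %d validated ok" % (len(state), len(state) - len(stuck)))
        if stuck:
            print("not yet validated:", stuck)

Fed this section, it would report all of vbuckets 447 through 511 as validated ok, matching the "Rebalance completed successfully." entry above.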
INFO REPORT <0.85.0> 2011-01-03 12:56:46 =============================================================================== Pushing config done INFO REPORT <0.65.0> 2011-01-03 12:56:46 =============================================================================== config change: rebalance_status -> none INFO REPORT <0.65.0> 2011-01-03 12:56:46 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 12:56:46 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 12:56:46 =============================================================================== Pushing config done INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', 
active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing 
replicators for vbucket 34 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} 
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 53 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 54 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 57 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 58 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 61 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 62 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 65 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 66 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 67 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 68 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 69 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 70 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 71 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 72 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 73 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 74 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 75 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 76 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 77 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 78 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 79 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 80 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 81 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 82 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 83 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 84 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 85 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 86 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 87 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 88 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 89 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 90 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 91 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 92 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 93 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 94 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 95 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 96 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 97 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 98 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 99 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 100 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 101 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 102 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 103 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 104 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 105 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 106 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 107 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 108 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 109 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 110 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 111 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 112 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 113 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 114 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 115 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 116 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 117 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 118 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 119 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 120 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 121 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 122 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 123 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 124 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 125 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 126 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 127 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 128 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 129 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 130 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 131 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 132 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 133 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 134 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 135 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 136 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 137 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 138 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 139 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 140 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 141 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 142 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 143 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 144 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 145 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 146 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 147 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 148 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 149 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 150 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 151 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 152 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 153 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 154 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 155 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 156 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 157 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 158 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 159 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 160 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 161 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 162 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 163 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 164 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 165 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 166 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 167 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 168 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 169 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 170 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 171 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 172 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 173 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 174 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 175 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 176 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 177 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 178 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 179 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 180 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 181 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 182 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 183 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 184 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 185 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 186 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 187 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 188 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 189 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 190 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 191 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 192 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 193 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 194 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 195 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 196 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 197 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 198 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 199 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 200 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 201 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 202 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 203 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 204 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 205 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 206 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 207 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 208 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 209 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 210 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 211 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 212 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 213 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 214 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 215 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 216 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 217 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 218 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 219 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 220 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 221 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 222 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 223 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 224 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 225 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 226 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 227 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 228 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 229 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 230 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 231 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 232 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 233 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 234 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 235 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 236 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 237 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 238 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 239 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 240 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 241 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 242 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 243 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 244 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 245 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 246 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 247 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 248 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 249 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 250 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 251 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 252 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 253 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 254 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 255 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 256 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 257 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 258 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 259 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 260 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 261 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 262 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 263 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 264 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 265 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 266 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 267 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 268 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 269 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 270 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 271 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 272 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 273 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 274 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 275 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 276 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 277 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 278 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 279 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 280 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 281 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 282 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 283 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 284 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 285 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 286 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 287 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 288 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 289 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 290 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 291 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 292 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 293 on master 'ns_1@10.2.1.101' because of 
{{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 294 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 295 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 296 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 297 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 298 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 299 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 300 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 301 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 302 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 303 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 304 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 305 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 306 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 307 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 308 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 309 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 310 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 311 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 312 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 313 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 314 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 315 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 316 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 317 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 318 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 319 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 320 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 321 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 322 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 323 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 324 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 325 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 326 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 327 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 328 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 329 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 330 on master 'ns_1@10.2.1.101' because of 
{{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 331 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 332 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 333 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 334 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 335 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 336 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 337 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 338 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 339 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 340 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 341 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 342 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 343 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 344 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 345 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 346 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 347 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 348 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 349 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 350 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 351 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 352 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 353 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 354 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 355 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 356 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 357 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 358 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 359 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 360 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 361 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 362 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 363 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 364 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 365 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 366 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 367 on master 'ns_1@10.2.1.101' because of 
{{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 368 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 369 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 370 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 371 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 372 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 373 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 374 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 375 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 376 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 377 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 378 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 379 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 380 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 381 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 382 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 383 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 384 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 385 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 386 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 387 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 388 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 389 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 390 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 391 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 392 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 393 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 394 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 395 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 396 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 397 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 398 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 399 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 400 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 401 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 402 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 403 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 404 on master 'ns_1@10.2.1.101' because of 
{{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 405 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 406 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 407 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 408 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 409 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 410 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 411 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 412 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 413 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 414 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 415 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 416 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 417 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 418 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 419 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 420 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 421 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 422 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 423 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 424 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 425 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 426 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 427 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 428 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 429 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 430 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 431 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 432 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 433 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 434 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 435 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 436 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 437 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 438 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 439 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 440 on master 'ns_1@10.2.1.101' because of {{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.1861.0> 2011-01-03 12:56:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 441 on master 'ns_1@10.2.1.101' because of 
{{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}

INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 442 on master 'ns_1@10.2.1.101' because of
{{'ns_1@10.2.1.101', active}, {'ns_1@10.2.1.100', dead}}

[... 69 further identical ns_janitor:193 INFO REPORTs omitted: one per vbucket from 443 through 511, all logged by <0.1861.0> at 12:56:48, all giving the same reason tuple ...]
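To read the reason tuple: each element is a {Node, State} pair for the two ends of the replication stream, so {{'ns_1@10.2.1.101',active},{'ns_1@10.2.1.100',dead}} says the master copy on 'ns_1@10.2.1.101' is healthy while 'ns_1@10.2.1.100' is considered dead, leaving the replicator with nowhere to ship data. As a purely illustrative sketch (invented module and function names; this is not the ns_janitor source), the decision amounts to a pattern match:

    %% janitor_sketch.erl -- hypothetical illustration only, not Membase code.
    -module(janitor_sketch).
    -export([needs_kill/1]).

    %% The master copy is active but the node holding the other end of the
    %% stream is dead, so the replicator is torn down.
    needs_kill({{_MasterNode, active}, {_OtherNode, dead}}) -> true;
    needs_kill(_Reason) -> false.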
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets
[1023,1022,1021, ... ,514,513,512] in bucket "default" from node 'ns_1@10.2.1.100' to node 'ns_1@10.2.1.101'
[... the elided list is every vbucket id, descending from 1023 through 512 ...]

INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:251: Args = [vbucketmigrator,"./bin/vbucketmigrator/vbucketmigrator",
    ["-e","-a","default","-h","10.2.1.100:11210","-d","10.2.1.101:11210","-A","-v",
     "-b","1023","-b","1022", ... ,"-b","513","-b","512"],
    [use_stdio,stderr_to_stdout,{write_data,[[],"\n"]}]]
[... the elided arguments are one "-b",N pair per vbucket, N descending from 1021 through 514 ...]

PROGRESS REPORT <0.260.0> 2011-01-03 12:56:48
===============================================================================
supervisor {local,'ns_vbm_sup-default'}
started [{pid,<0.1865.0>},
         {name,{child_id,[1023,1022, ... ,513,512],'ns_1@10.2.1.101'}},
         {mfa,{ns_port_server,start_link,
               [vbucketmigrator,"./bin/vbucketmigrator/vbucketmigrator",
                ["-e","-a","default","-h","10.2.1.100:11210",
                 "-d","10.2.1.101:11210","-A","-v",
                 "-b","1023","-b","1022", ... ,"-b","513","-b","512"],
                [use_stdio,stderr_to_stdout,{write_data,[[],"\n"]}]]}},
         {restart_type,permanent},
         {shutdown,10},
         {child_type,worker}]
[... both elided sequences in this report repeat the same 1023-through-512 enumeration shown above ...]
INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets
[511,510,509, ... ,2,1,0] in bucket "default" from node 'ns_1@10.2.1.101' to node 'ns_1@10.2.1.100'
[... the elided list is every vbucket id, descending from 511 through 0 ...]

INFO REPORT <0.1861.0> 2011-01-03 12:56:48
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:251: Args = [vbucketmigrator,"./bin/vbucketmigrator/vbucketmigrator",
    ["-e","-a","default","-h","10.2.1.101:11210","-d","10.2.1.100:11210","-A","-v",
     "-b","511","-b","510", ... ,"-b","1","-b","0"],
    [use_stdio,stderr_to_stdout,{write_data,[[],"\n"]}]]
[... the elided arguments are one "-b",N pair per vbucket, N descending from 509 through 2 ...]

ERROR REPORT <0.1866.0> 2011-01-03 12:56:49
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.1868.0> 2011-01-03 12:56:49
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.1865.0> 2011-01-03 12:56:49
===============================================================================
vbucketmigrator<0.1865.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.1865.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1865.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.1865.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.1865.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.1865.0>: Authenticated towards: {Sock 10.2.1.100:11210}

[... nine more "Connection attempt from disallowed node 'ns_1@10.2.1.102'" ERROR REPORTs omitted (pids <0.1873.0> through <0.1915.0>, timestamps 12:56:51 through 12:57:05); only the pid and timestamp differ ...]
INFO REPORT <0.93.0> 2011-01-03 12:57:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,88227,399401}},
   {active_buckets,["default"]},
   {memory,[{total,13135064},{processes,5984732},{processes_used,5976404},
            {system,7150332},{atom,560301},{atom_used,557416},
            {binary,181072},{code,4570913},{ets,430372}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},
             {kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},
             {menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,239},
   {memory_data,{4284698624,762580992,{<0.228.0>,786208}}},
   {disk_data,[{"C:\\",48162864,49},{"D:\\",51279476,0},{"G:\\",34724465,17}]},
   {replication,[{"default",1.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,3482554368},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{230404,0}},{context_switches,{71964,0}},
                {garbage_collection,{21671,72318772,0}},
                {io,{{input,6175484},{output,3722990}}},
                {reductions,{19230181,286973}},{run_queue,0},{runtime,{2418,0}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,88228,350400}},
   {active_buckets,["default"]},
   {memory,[{total,13287184},{processes,5986100},{processes_used,5979276},
            {system,7301084},{atom,559325},{atom_used,555516},
            {binary,434256},{code,4543239},{ets,368332}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},
             {kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},
             {menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,170},
   {memory_data,{4284698624,627945472,{<11993.298.0>,1086308}}},
   {disk_data,[{"C:\\",46243100,36},{"D:\\",51809624,0},{"G:\\",33929248,18}]},
   {replication,[{"default",1.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,3475095552},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{160899,0}},{context_switches,{33140,0}},
                {garbage_collection,{5153,25573340,0}},
                {io,{{input,6488834},{output,2264997}}},
                {reductions,{6744434,96279}},{run_queue,0},{runtime,{1294,15}}]}]}]

[... seven disallowed-node ERROR REPORTs omitted (pids <0.1940.0> through <0.1975.0>, 12:57:09 through 12:57:21) ...]
INFO REPORT <0.85.0> 2011-01-03 12:57:21
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

[... five disallowed-node ERROR REPORTs omitted (pids <0.1977.0> through <0.2005.0>, 12:57:22 through 12:57:31) ...]

INFO REPORT <0.259.0> 2011-01-03 12:57:31
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 514 auth_errors 0 bucket_conns 13 bytes_read 79282566 bytes_written 1661571
cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 27382
conn_yields 24 connection_structures 35 curr_connections 35 curr_items 27382
curr_items_tot 54656 daemon_connections 10 decr_hits 0 decr_misses 0
delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 55 ep_commit_time 0
ep_commit_time_total 16 ep_data_age 5 ep_data_age_highwat 14
ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81
ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4
ep_expired 0 ep_flush_duration 6 ep_flush_duration_highwat 6
ep_flush_duration_total 17 ep_flush_preempts 0 ep_flusher_state running
ep_flusher_todo 11359 ep_io_num_read 0 ep_io_num_write 38254 ep_io_read_bytes 0
ep_io_write_bytes 54003538 ep_item_begin_failed 0 ep_item_commit_failed 0
ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 82147968
ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776
ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 0
ep_num_eject_failures 0 ep_num_eject_replicas 0 ep_num_expiry_pager_runs 0
ep_num_non_resident 0 ep_num_not_my_vbuckets 0 ep_num_pager_runs 0
ep_num_value_ejects 0 ep_oom_errors 0 ep_overhead 26297520 ep_pending_ops 0
ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0
ep_queue_age_cap 900 ep_queue_size 5046 ep_storage_age 5
ep_storage_age_highwat 12 ep_storage_type featured ep_store_max_concurrency 10
ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0
ep_tap_bg_fetched 0 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0
ep_too_young 0 ep_total_cache_size 82147968 ep_total_del_items 0
ep_total_enqueued 54674 ep_total_new_items 38254 ep_total_persisted 38254
ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0
ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600
ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0
ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0
incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864
mem_used 108445488 pid 2160 pointer_size 64 rejected_conns 0
tap_connect_received 513 tap_mutation_received 27274 tap_mutation_sent 26143
tap_opaque_received 513 tap_opaque_sent 1025 tap_vbucket_set_sent 1024
threads 4 time 1294088251 total_connections 548 uptime 265
version 1.4.4_304_g7d5a132

[... fourteen disallowed-node ERROR REPORTs omitted (pids <0.2010.0> through <0.2080.0>, 12:57:32 through 12:57:55) ...]

INFO REPORT <0.85.0> 2011-01-03 12:57:59
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

[... five disallowed-node ERROR REPORTs omitted (pids <0.2091.0> through <0.2109.0>, 12:57:59 through 12:58:05) ...]
INFO REPORT <0.93.0> 2011-01-03 12:58:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,88287,412401}},
   {active_buckets,["default"]},
   {memory,[{total,13176648},{processes,5988628},{processes_used,5979796},
            {system,7188020},{atom,560301},{atom_used,557531},
            {binary,184976},{code,4570913},{ets,464500}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},
             {kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},
             {menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,299},
   {memory_data,{4284698624,812920832,{<0.228.0>,971680}}},
   {disk_data,[{"C:\\",48162864,49},{"D:\\",51279476,0},{"G:\\",34724465,17}]},
   {replication,[{"default",1.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,3271561216},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{290402,0}},{context_switches,{88861,0}},
                {garbage_collection,{24654,95618607,0}},
                {io,{{input,6409082},{output,4057359}}},
                {reductions,{39081536,231981}},{run_queue,0},{runtime,{3244,0}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,88288,348400}},
   {active_buckets,["default"]},
   {memory,[{total,13352088},{processes,6069564},{processes_used,6062332},
            {system,7282524},{atom,559325},{atom_used,555631},
            {binary,346592},{code,4543239},{ets,437852}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},
             {kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},
             {menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,230},
   {memory_data,{4284698624,862138368,{<11993.298.0>,1086308}}},
   {disk_data,[{"C:\\",46243100,36},{"D:\\",51809624,0},{"G:\\",33929248,18}]},
   {replication,[{"default",1.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,3263762432},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{220897,0}},{context_switches,{40835,0}},
                {garbage_collection,{7581,42814212,0}},
                {io,{{input,7431052},{output,2639083}}},
                {reductions,{11816012,199846}},{run_queue,0},{runtime,{1965,47}}]}]}]

[... five disallowed-node ERROR REPORTs omitted (pids <0.2126.0> through <0.2144.0>, 12:58:09 through 12:58:15) ...]
INFO REPORT <0.85.0> 2011-01-03 12:58:16
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

[... six disallowed-node ERROR REPORTs omitted (pids <0.2155.0> through <0.2185.0>, 12:58:19 through 12:58:29) ...]

INFO REPORT <0.85.0> 2011-01-03 12:58:29
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

[... nine disallowed-node ERROR REPORTs omitted (pids <0.2190.0> through <0.2233.0>, 12:58:31 through 12:58:45) ...]

INFO REPORT <0.85.0> 2011-01-03 12:58:46
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2251.0> 2011-01-03 12:58:51
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2256.0> 2011-01-03 12:58:52
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2260.0> 2011-01-03 12:58:54
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2265.0> 2011-01-03 12:58:55
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2278.0> 2011-01-03 12:58:59
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2283.0> 2011-01-03 12:59:01
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2285.0> 2011-01-03 12:59:02
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2292.0> 2011-01-03 12:59:04
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2294.0> 2011-01-03 12:59:05
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.93.0> 2011-01-03 12:59:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,88347,410401}}, {active_buckets,["default"]}, {memory, [{total,13962064}, {processes,6731508}, {processes_used,6722676}, {system,7230556}, {atom,560301}, {atom_used,557531}, {binary,193224}, {code,4570913}, {ets,498724}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,359}, {memory_data,{4284698624,1051246592,{<0.228.0>,786208}}}, {disk_data, [{"C:\\",48162864,49},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,3056926720}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{350400,0}}, {context_switches,{105849,0}}, {garbage_collection,{27639,119413106,0}}, {io,{{input,6751226},{output,4372895}}}, {reductions,{59258962,294253}}, {run_queue,0}, {runtime,{4165,16}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,88348,346400}}, {active_buckets,["default"]}, {memory, [{total,13525608}, {processes,6196204}, {processes_used,6188012}, {system,7329404}, {atom,559325}, {atom_used,555631}, {binary,357512}, {code,4543239}, {ets,473924}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,290},
  {memory_data,{4284698624,1090035712,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,36},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,3051139072}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{280895,0}}, {context_switches,{49338,0}}, {garbage_collection,{10276,63045656,0}}, {io,{{input,9091100},{output,3377533}}}, {reductions,{17434810,192804}}, {run_queue,0}, {runtime,{3010,46}}]}]}]

ERROR REPORT <0.2325.0> 2011-01-03 12:59:09
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2327.0> 2011-01-03 12:59:11
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.259.0> 2011-01-03 12:59:11
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 514 auth_errors 0 bucket_conns 13 bytes_read 400438407 bytes_written 5734504 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 138748 conn_yields 36 connection_structures 35 curr_connections 35 curr_items 138748 curr_items_tot 276327 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 297 ep_commit_time 0 ep_commit_time_total 98 ep_data_age 16 ep_data_age_highwat 28 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 11 ep_flush_duration_highwat 12 ep_flush_duration_total 95 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 11386 ep_io_num_read 0 ep_io_num_write 249328 ep_io_read_bytes 0 ep_io_write_bytes 352189354 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 415319481 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 0 ep_num_eject_failures 0 ep_num_eject_replicas 0 ep_num_expiry_pager_runs 0 ep_num_non_resident 0 ep_num_not_my_vbuckets 0 ep_num_pager_runs 0 ep_num_value_ejects 0 ep_oom_errors 0 ep_overhead 26790755 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 15614 ep_storage_age 13 ep_storage_age_highwat 26 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 0 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 415319481 ep_total_del_items 0 ep_total_enqueued 276369 ep_total_new_items 249328 ep_total_persisted 249328 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 442110236 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 513 tap_mutation_received 137579 tap_mutation_sent 137824 tap_opaque_received 513 tap_opaque_sent 1025 tap_vbucket_set_sent 1024 threads 4 time 1294088351 total_connections 548 uptime 365 version 1.4.4_304_g7d5a132
ERROR REPORT <0.2332.0> 2011-01-03 12:59:12
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2336.0> 2011-01-03 12:59:14
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.85.0> 2011-01-03 12:59:14
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

ERROR REPORT <0.2338.0> 2011-01-03 12:59:15
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2354.0> 2011-01-03 12:59:19
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2356.0> 2011-01-03 12:59:21
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2361.0> 2011-01-03 12:59:22
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2365.0> 2011-01-03 12:59:24
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2370.0> 2011-01-03 12:59:25
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2382.0> 2011-01-03 12:59:29
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2389.0> 2011-01-03 12:59:31
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2391.0> 2011-01-03 12:59:32
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2398.0> 2011-01-03 12:59:34
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2400.0> 2011-01-03 12:59:35
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2414.0> 2011-01-03 12:59:39
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2416.0> 2011-01-03 12:59:41
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2420.0> 2011-01-03 12:59:42
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2427.0> 2011-01-03 12:59:44
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2429.0> 2011-01-03 12:59:45
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
ERROR REPORT <0.2444.0> 2011-01-03 12:59:49
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2446.0> 2011-01-03 12:59:49
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.85.0> 2011-01-03 12:59:50
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

ERROR REPORT <0.2448.0> 2011-01-03 12:59:51
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2453.0> 2011-01-03 12:59:52
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2459.0> 2011-01-03 12:59:54
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2464.0> 2011-01-03 12:59:55
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2475.0> 2011-01-03 12:59:59
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2480.0> 2011-01-03 13:00:01
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2482.0> 2011-01-03 13:00:02
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2489.0> 2011-01-03 13:00:04
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2493.0> 2011-01-03 13:00:05
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.93.0> 2011-01-03 13:00:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,88407,408401}}, {active_buckets,["default"]}, {memory, [{total,13900008}, {processes,6614948}, {processes_used,6606116}, {system,7285060}, {atom,560301}, {atom_used,557531}, {binary,212056}, {code,4570913}, {ets,534564}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,419}, {memory_data,{4284698624,1263738880,{<0.228.0>,786208}}}, {disk_data, [{"C:\\",48162864,50},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,2844975104}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{410398,0}}, {context_switches,{123220,0}}, {garbage_collection,{29938,143784816,0}}, {io,{{input,7840432},{output,4874363}}}, {reductions,{79565799,230381}}, {run_queue,0}, {runtime,{5085,0}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,88408,344400}}, {active_buckets,["default"]}, {memory, [{total,14203920},
  {processes,6824348}, {processes_used,6816156}, {system,7379572}, {atom,559325}, {atom_used,555631}, {binary,373600}, {code,4543239}, {ets,507996}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,350}, {memory_data,{4284698624,1299959808,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,36},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,2840260608}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{340893,0}}, {context_switches,{57542,0}}, {garbage_collection,{12735,83285257,0}}, {io,{{input,10394000},{output,3760919}}}, {reductions,{23015350,186251}}, {run_queue,0}, {runtime,{3822,16}}]}]}]

ERROR REPORT <0.2507.0> 2011-01-03 13:00:09
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2512.0> 2011-01-03 13:00:11
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2514.0> 2011-01-03 13:00:12
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2521.0> 2011-01-03 13:00:14
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2525.0> 2011-01-03 13:00:15
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.85.0> 2011-01-03 13:00:17
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

ERROR REPORT <0.2539.0> 2011-01-03 13:00:19
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2541.0> 2011-01-03 13:00:21
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2546.0> 2011-01-03 13:00:22
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2550.0> 2011-01-03 13:00:24
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2555.0> 2011-01-03 13:00:25
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.85.0> 2011-01-03 13:00:26
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

ERROR REPORT <0.2569.0> 2011-01-03 13:00:29
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2574.0> 2011-01-03 13:00:31
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2576.0> 2011-01-03 13:00:32
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2580.0> 2011-01-03 13:00:34
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2585.0> 2011-01-03 13:00:35
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2598.0> 2011-01-03 13:00:39
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2603.0> 2011-01-03 13:00:41
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2605.0> 2011-01-03 13:00:42
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2612.0> 2011-01-03 13:00:44
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2614.0> 2011-01-03 13:00:45
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2628.0> 2011-01-03 13:00:49
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2633.0> 2011-01-03 13:00:49
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2635.0> 2011-01-03 13:00:51
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.259.0> 2011-01-03 13:00:51
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 514 auth_errors 0 bucket_conns 13 bytes_read 725350036 bytes_written 9818970 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 250542 conn_yields 50 connection_structures 35 curr_connections 35 curr_items 250542 curr_items_tot 500554 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 547 ep_commit_time 0 ep_commit_time_total 187 ep_data_age 16 ep_data_age_highwat 30 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 12 ep_flush_duration_highwat 14 ep_flush_duration_total 181 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 4776 ep_io_num_read 0 ep_io_num_write 473441 ep_io_read_bytes 0 ep_io_write_bytes 668861023 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 752332662 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 0 ep_num_eject_failures 0 ep_num_eject_replicas 0 ep_num_expiry_pager_runs 0 ep_num_non_resident 0 ep_num_not_my_vbuckets 0 ep_num_pager_runs 0 ep_num_value_ejects 0 ep_oom_errors 0 ep_overhead 26795853 ep_pending_ops 0 ep_pending_ops_max 0
ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 22337 ep_storage_age 13 ep_storage_age_highwat 28 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 0 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 752332662 ep_total_del_items 0 ep_total_enqueued 500617 ep_total_new_items 473441 ep_total_persisted 473441 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 779128515 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 513 tap_mutation_received 250012 tap_mutation_sent 249166 tap_opaque_received 513 tap_opaque_sent 1025 tap_vbucket_set_sent 1024 threads 4 time 1294088450 total_connections 548 uptime 464 version 1.4.4_304_g7d5a132

ERROR REPORT <0.2640.0> 2011-01-03 13:00:52
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2644.0> 2011-01-03 13:00:54
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2649.0> 2011-01-03 13:00:55
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2662.0> 2011-01-03 13:00:59
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2664.0> 2011-01-03 13:01:01
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2669.0> 2011-01-03 13:01:02
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2673.0> 2011-01-03 13:01:04
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2678.0> 2011-01-03 13:01:05
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.93.0> 2011-01-03 13:01:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,88467,406401}}, {active_buckets,["default"]}, {memory, [{total,14186896}, {processes,6838868}, {processes_used,6830036}, {system,7348028}, {atom,560301}, {atom_used,557531}, {binary,238040}, {code,4570913}, {ets,571492}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,479}, {memory_data,{4284698624,1476481024,{<0.228.0>,786208}}}, {disk_data, [{"C:\\",48162864,50},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624},
   {free_memory,2631122944}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{470396,0}}, {context_switches,{140212,0}}, {garbage_collection,{32304,167858938,0}}, {io,{{input,8219395},{output,5201242}}}, {reductions,{99891274,230616}}, {run_queue,0}, {runtime,{5896,0}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,88468,342400}}, {active_buckets,["default"]}, {memory, [{total,14412160}, {processes,7026484}, {processes_used,7018292}, {system,7385676}, {atom,559325}, {atom_used,555631}, {binary,345712}, {code,4543239}, {ets,542012}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,410}, {memory_data,{4284698624,1512488960,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,37},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,2630938624}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{400891,0}}, {context_switches,{65744,0}}, {garbage_collection,{15258,103470652,0}}, {io,{{input,11683562},{output,4160837}}}, {reductions,{28641455,197626}}, {run_queue,0}, {runtime,{4243,0}}]}]}]

ERROR REPORT <0.2692.0> 2011-01-03 13:01:09
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2697.0> 2011-01-03 13:01:11
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2699.0> 2011-01-03 13:01:12
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2706.0> 2011-01-03 13:01:14
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2708.0> 2011-01-03 13:01:15
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2722.0> 2011-01-03 13:01:19
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2724.0> 2011-01-03 13:01:21
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2726.0> 2011-01-03 13:01:22
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2733.0> 2011-01-03 13:01:24
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2735.0> 2011-01-03 13:01:25
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.85.0> 2011-01-03 13:01:25
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

ERROR REPORT <0.2750.0> 2011-01-03 13:01:29
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2754.0> 2011-01-03 13:01:31
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2759.0> 2011-01-03 13:01:32
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2763.0> 2011-01-03 13:01:34
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2768.0> 2011-01-03 13:01:35
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2779.0> 2011-01-03 13:01:39
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.85.0> 2011-01-03 13:01:40
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

ERROR REPORT <0.2784.0> 2011-01-03 13:01:41
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2786.0> 2011-01-03 13:01:42
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2802.0> 2011-01-03 13:01:44
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2804.0> 2011-01-03 13:01:45
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.85.0> 2011-01-03 13:01:47
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

ERROR REPORT <0.2816.0> 2011-01-03 13:01:49
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2818.0> 2011-01-03 13:01:49
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2823.0> 2011-01-03 13:01:51
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2825.0> 2011-01-03 13:01:52
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2832.0> 2011-01-03 13:01:54
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2834.0> 2011-01-03 13:01:55
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2857.0> 2011-01-03 13:01:59
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2877.0> 2011-01-03 13:02:01
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2911.0> 2011-01-03 13:02:02
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2942.0> 2011-01-03 13:02:04
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.2974.0> 2011-01-03 13:02:05
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.93.0> 2011-01-03 13:02:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,88527,404401}}, {active_buckets,["default"]}, {memory, [{total,27000096}, {processes,19603044}, {processes_used,19594420}, {system,7397052}, {atom,560301}, {atom_used,557531}, {binary,243944}, {code,4570913}, {ets,605652}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,539}, {memory_data,{4284698624,1684643840,{<0.228.0>,786208}}}, {disk_data, [{"C:\\",48162864,51},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,2371895296}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{530394,0}}, {context_switches,{156961,0}}, {garbage_collection,{34452,191074986,0}}, {io,{{input,8529944},{output,5528788}}}, {reductions,{119901238,252724}}, {run_queue,0}, {runtime,{6708,16}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,88528,340400}}, {active_buckets,["default"]}, {memory, [{total,14154056}, {processes,6707236}, {processes_used,6699044}, {system,7446820}, {atom,559325}, {atom_used,555631}, {binary,367776}, {code,4543239}, {ets,580956}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,470}, {memory_data,{4284698624,1722933248,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,37},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,2365616128}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{460889,0}}, {context_switches,{74361,0}}, {garbage_collection,{17891,123956464,0}}, {io,{{input,13727874},{output,4782221}}}, {reductions,{34299295,186770}}, {run_queue,0}, {runtime,{4524,16}}]}]}]

ERROR REPORT <0.3066.0> 2011-01-03 13:02:09
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3071.0> 2011-01-03 13:02:11
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3073.0> 2011-01-03 13:02:12
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3077.0> 2011-01-03 13:02:14
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3082.0> 2011-01-03 13:02:15
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3093.0> 2011-01-03 13:02:19
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3098.0> 2011-01-03 13:02:21
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3100.0> 2011-01-03 13:02:22
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3107.0> 2011-01-03 13:02:24
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3109.0> 2011-01-03 13:02:25
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3124.0> 2011-01-03 13:02:29
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3126.0> 2011-01-03 13:02:31
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.259.0> 2011-01-03 13:02:31
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 514 auth_errors 0 bucket_conns 213 bytes_read 1368967525 bytes_written 16640150 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 476131 conn_yields 118 connection_structures 235 curr_connections 235 curr_items 476075 curr_items_tot 943967 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 807 ep_commit_time 0 ep_commit_time_total 279 ep_data_age 41 ep_data_age_highwat 45 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 15 ep_flush_duration_highwat 15 ep_flush_duration_total 257 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 21364 ep_io_num_read 0 ep_io_num_write 713011 ep_io_read_bytes 0 ep_io_write_bytes 1007586576 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 1420895470 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 0 ep_num_eject_failures 0 ep_num_eject_replicas 0 ep_num_expiry_pager_runs 0 ep_num_non_resident 0 ep_num_not_my_vbuckets 0 ep_num_pager_runs 0 ep_num_value_ejects 0 ep_oom_errors 0 ep_overhead 36717743 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 209697 ep_storage_age 41 ep_storage_age_highwat 43 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 0 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 1420895470 ep_total_del_items 0 ep_total_enqueued 944150 ep_total_new_items 712992 ep_total_persisted 713011 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0
ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 1457613213 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 513 tap_mutation_received 467964 tap_mutation_sent 471923 tap_opaque_received 513 tap_opaque_sent 1025 tap_vbucket_set_sent 1024 threads 4 time 1294088551 total_connections 748 uptime 565 version 1.4.4_304_g7d5a132

ERROR REPORT <0.3131.0> 2011-01-03 13:02:32
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3137.0> 2011-01-03 13:02:34
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3142.0> 2011-01-03 13:02:35
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.85.0> 2011-01-03 13:02:38
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.3038.0> 2011-01-03 13:02:39
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.3026.0> 2011-01-03 13:02:39
===============================================================================
menelaus_web streaming socket closed by client

ERROR REPORT <0.3153.0> 2011-01-03 13:02:39
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.3016.0> 2011-01-03 13:02:40
===============================================================================
menelaus_web streaming socket closed by client

ERROR REPORT <0.3155.0> 2011-01-03 13:02:41
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3160.0> 2011-01-03 13:02:42
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3164.0> 2011-01-03 13:02:44
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3169.0> 2011-01-03 13:02:45
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.3007.0> 2011-01-03 13:02:45
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.2998.0> 2011-01-03 13:02:46
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.2989.0> 2011-01-03 13:02:46
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.2980.0> 2011-01-03 13:02:47
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.2969.0> 2011-01-03 13:02:47
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.2957.0> 2011-01-03 13:02:47
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.2948.0> 2011-01-03 13:02:48
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.2935.0> 2011-01-03 13:02:48
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.2926.0> 2011-01-03 13:02:49
===============================================================================
menelaus_web streaming socket closed by client

ERROR REPORT <0.3181.0> 2011-01-03 13:02:49
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.2917.0> 2011-01-03 13:02:49
===============================================================================
menelaus_web streaming socket closed by client

ERROR REPORT <0.3183.0> 2011-01-03 13:02:49
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.2906.0> 2011-01-03 13:02:50
===============================================================================
menelaus_web streaming socket closed by client

ERROR REPORT <0.3188.0> 2011-01-03 13:02:51
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.2892.0> 2011-01-03 13:02:52
===============================================================================
menelaus_web streaming socket closed by client

ERROR REPORT <0.3190.0> 2011-01-03 13:02:52
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.2883.0> 2011-01-03 13:02:53
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.2872.0> 2011-01-03 13:02:53
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.2863.0> 2011-01-03 13:02:53
===============================================================================
menelaus_web streaming socket closed by client

ERROR REPORT <0.3197.0> 2011-01-03 13:02:54
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.2852.0> 2011-01-03 13:02:54
===============================================================================
menelaus_web streaming socket closed by client

ERROR REPORT <0.3199.0> 2011-01-03 13:02:55
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3213.0> 2011-01-03 13:02:59
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3215.0> 2011-01-03 13:03:01
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.85.0> 2011-01-03 13:03:01
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

ERROR REPORT <0.3220.0> 2011-01-03 13:03:02
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3224.0> 2011-01-03 13:03:04
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3228.0> 2011-01-03 13:03:05
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.93.0> 2011-01-03 13:03:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,88587,402401}}, {active_buckets,["default"]}, {memory, [{total,14731832}, {processes,7271364}, {processes_used,7250828}, {system,7460468}, {atom,560301}, {atom_used,557531}, {binary,278912}, {code,4570913}, {ets,641596}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,599}, {memory_data,{4284698624,2043715584,{<0.2792.0>,971680}}}, {disk_data, [{"C:\\",48162864,51},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1701363712}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{590392,0}}, {context_switches,{183448,0}}, {garbage_collection,{38500,240912796,0}}, {io,{{input,9631267},{output,6906433}}}, {reductions,{146733670,253023}}, {run_queue,0}, {runtime,{7893,0}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,88588,338400}}, {active_buckets,["default"]}, {memory, [{total,13972736}, {processes,6496100}, {processes_used,6488316}, {system,7476636}, {atom,559325}, {atom_used,555631}, {binary,361920}, {code,4543239}, {ets,615212}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,530}, {memory_data,{4284698624,2167726080,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,38},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1700143104}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{520903,0}}, {context_switches,{83514,0}}, {garbage_collection,{20794,144324845,0}}, {io,{{input,15071529},{output,5182012}}}, {reductions,{40023710,187652}}, {run_queue,0}, {runtime,{4976,0}}]}]}]

ERROR REPORT <0.3245.0> 2011-01-03 13:03:09
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3247.0> 2011-01-03 13:03:11
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3261.0> 2011-01-03 13:03:12
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3265.0> 2011-01-03 13:03:14
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3270.0> 2011-01-03 13:03:15
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3281.0> 2011-01-03 13:03:19
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3286.0> 2011-01-03 13:03:21
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3288.0> 2011-01-03 13:03:22
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3304.0> 2011-01-03 13:03:24
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3306.0> 2011-01-03 13:03:25
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3318.0> 2011-01-03 13:03:29
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3323.0> 2011-01-03 13:03:31
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3325.0> 2011-01-03 13:03:32
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3332.0> 2011-01-03 13:03:34
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3336.0> 2011-01-03 13:03:35
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3350.0> 2011-01-03 13:03:39
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3361.0> 2011-01-03 13:03:41
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3366.0> 2011-01-03 13:03:42
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3370.0> 2011-01-03 13:03:44
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.105.0> 2011-01-03 13:03:45
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

ERROR REPORT <0.3375.0> 2011-01-03 13:03:45
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.105.0> 2011-01-03 13:03:46
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:03:48
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

ERROR REPORT <0.3387.0> 2011-01-03 13:03:49
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3389.0> 2011-01-03 13:03:49
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.105.0> 2011-01-03 13:03:50
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.85.0> 2011-01-03 13:03:51
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

ERROR REPORT <0.3394.0> 2011-01-03 13:03:51
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.105.0> 2011-01-03 13:03:51
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

ERROR REPORT <0.3396.0> 2011-01-03 13:03:52
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.105.0> 2011-01-03 13:03:54
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

ERROR REPORT <0.3403.0> 2011-01-03 13:03:54
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.105.0> 2011-01-03 13:03:55
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

ERROR REPORT <0.3405.0> 2011-01-03 13:03:55
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.105.0> 2011-01-03 13:03:56
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.85.0> 2011-01-03 13:03:57
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.105.0> 2011-01-03 13:03:58
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

ERROR REPORT <0.3452.0> 2011-01-03 13:03:59
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.105.0> 2011-01-03 13:04:00
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

ERROR REPORT <0.3457.0> 2011-01-03 13:04:01
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

ERROR REPORT <0.3459.0> 2011-01-03 13:04:02
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.105.0> 2011-01-03 13:04:02
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

ERROR REPORT <0.3466.0> 2011-01-03 13:04:04
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.105.0> 2011-01-03 13:04:04
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

ERROR REPORT <0.3468.0> 2011-01-03 13:04:05
===============================================================================
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **

INFO REPORT <0.85.0> 2011-01-03 13:04:05
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.105.0> 2011-01-03 13:04:06
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:04:07
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.93.0> 2011-01-03 13:04:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,88647,400401}}, {active_buckets,["default"]}, {memory, [{total,23145528}, {processes,15639860}, {processes_used,15624140}, {system,7505668}, {atom,560301}, {atom_used,557531}, {binary,300344}, {code,4570913}, {ets,661508}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,659}, {memory_data,{4284698624,2654203904,{<0.2792.0>,971680}}}, {disk_data, [{"C:\\",48162864,52},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1218867200}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{650390,0}}, {context_switches,{202898,0}}, {garbage_collection,{41626,271195975,0}}, {io,{{input,9995611},{output,7337698}}}, {reductions,{168608906,1038009}}, {run_queue,0}, {runtime,{8876,109}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,88648,352400}}, {active_buckets,["default"]}, {memory, [{total,14090288}, {processes,6577668}, {processes_used,6569476}, {system,7512620}, {atom,559325}, {atom_used,555631}, {binary,362344}, {code,4543239}, {ets,649332}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,590}, {memory_data,{4284698624,2749149184,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,38},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1113911296}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{580901,0}}, {context_switches,{92213,0}}, {garbage_collection,{23589,164689654,0}}, {io,{{input,16381861},{output,5575784}}}, {reductions,{45649621,222495}}, {run_queue,0}, {runtime,{5288,16}}]}]}]

INFO REPORT <0.105.0> 2011-01-03 13:04:09
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

ERROR REPORT <0.3487.0> 2011-01-03 13:04:09
13:04:09 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.3489.0> 2011-01-03 13:04:11 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:11 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.259.0> 2011-01-03 13:04:11 =============================================================================== ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default": auth_cmds 514 auth_errors 0 bucket_conns 93 bytes_read 2356027277 bytes_written 30239984 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 842973 conn_yields 118 connection_structures 235 curr_connections 115 curr_items 842704 curr_items_tot 1541203 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 1103 ep_commit_time 0 ep_commit_time_total 372 ep_data_age 96 ep_data_age_highwat 128 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 35 ep_flush_duration_highwat 35 ep_flush_duration_total 292 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 23260 ep_io_num_read 0 ep_io_num_write 1005372 ep_io_read_bytes 0 ep_io_write_bytes 1421639906 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 2219504477 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 0 ep_num_eject_failures 0 ep_num_eject_replicas 73272 ep_num_expiry_pager_runs 0 ep_num_non_resident 73272 ep_num_not_my_vbuckets 0 ep_num_pager_runs 0 ep_num_value_ejects 73272 ep_oom_errors 0 ep_overhead 51123325 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 512860 ep_storage_age 93 ep_storage_age_highwat 126 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 0 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 2322085277 ep_total_del_items 0 ep_total_enqueued 1541576 ep_total_new_items 1005263 ep_total_persisted 1005372 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2270627802 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 513 tap_mutation_received 781188 tap_mutation_sent 767833 tap_opaque_received 513 tap_opaque_sent 1025 tap_vbucket_set_sent 1024 threads 4 time 1294088651 total_connections 818 uptime 665 version 1.4.4_304_g7d5a132 ERROR REPORT <0.3494.0> 2011-01-03 13:04:12 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:13 =============================================================================== 
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:04:14 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3498.0> 2011-01-03 13:04:14 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:15 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3503.0> 2011-01-03 13:04:15 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:16 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:04:18 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:04:19 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3514.0> 2011-01-03 13:04:19 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:21 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3516.0> 2011-01-03 13:04:21 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:22 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3521.0> 2011-01-03 13:04:22 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:23 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3525.0> 2011-01-03 13:04:24 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:24 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3530.0> 2011-01-03 13:04:25 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:26 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:04:28 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3542.0> 2011-01-03 13:04:29 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** 
INFO REPORT <0.105.0> 2011-01-03 13:04:30 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3547.0> 2011-01-03 13:04:31 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:31 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3549.0> 2011-01-03 13:04:32 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:33 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3556.0> 2011-01-03 13:04:34 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.3558.0> 2011-01-03 13:04:35 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:35 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:04:37 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:04:39 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3574.0> 2011-01-03 13:04:39 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:41 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3576.0> 2011-01-03 13:04:41 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.85.0> 2011-01-03 13:04:41 =============================================================================== Pulling config from: 'ns_1@10.2.1.101' INFO REPORT <0.105.0> 2011-01-03 13:04:42 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3581.0> 2011-01-03 13:04:42 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:43 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3585.0> 2011-01-03 13:04:44 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:44 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3590.0> 2011-01-03 13:04:45 
=============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:47 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:04:48 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3602.0> 2011-01-03 13:04:49 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.3604.0> 2011-01-03 13:04:49 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:50 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3606.0> 2011-01-03 13:04:51 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.3611.0> 2011-01-03 13:04:52 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:53 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3615.0> 2011-01-03 13:04:54 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.3620.0> 2011-01-03 13:04:55 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:04:55 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:04:57 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:04:58 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3631.0> 2011-01-03 13:04:59 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:00 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3636.0> 2011-01-03 13:05:01 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:01 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:05:02 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3638.0> 2011-01-03 13:05:02 
=============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.3645.0> 2011-01-03 13:05:04 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:04 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3647.0> 2011-01-03 13:05:05 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:05 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:05:07 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.85.0> 2011-01-03 13:05:07 =============================================================================== Pulling config from: 'ns_1@10.2.1.101' INFO REPORT <0.93.0> 2011-01-03 13:05:08 =============================================================================== ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,88707,414401}}, {active_buckets,["default"]}, {memory, [{total,21303184}, {processes,13798644}, {processes_used,13782420}, {system,7504540}, {atom,560301}, {atom_used,557531}, {binary,265672}, {code,4570913}, {ets,695556}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,719}, {memory_data,{4284698624,3119693824,{<0.3445.0>,1386364}}}, {disk_data, [{"C:\\",48162864,52},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,855961600}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{710404,0}}, {context_switches,{222039,0}}, {garbage_collection,{44444,301824069,0}}, {io,{{input,10347704},{output,7663883}}}, {reductions,{190492319,363027}}, {run_queue,0}, {runtime,{9781,0}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,88708,350400}}, {active_buckets,["default"]}, {memory, [{total,13513472}, {processes,5959764}, {processes_used,5951068}, {system,7553708}, {atom,559325}, {atom_used,555631}, {binary,371352}, {code,4543239}, {ets,685212}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,650}, {memory_data,{4284698624,3311009792,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,39},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,622383104}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{640899,0}}, {context_switches,{101245,0}}, {garbage_collection,{26774,185230376,0}}, {io,{{input,18456246},{output,6713846}}}, {reductions,{51351818,191156}}, {run_queue,0}, {runtime,{5694,0}}]}]}] INFO REPORT <0.43.0> 2011-01-03 13:05:08 =============================================================================== alarm_handler 
{set,{system_memory_high_watermark,[]}} INFO REPORT <0.105.0> 2011-01-03 13:05:08 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3679.0> 2011-01-03 13:05:09 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:09 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3682.0> 2011-01-03 13:05:11 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:11 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3689.0> 2011-01-03 13:05:12 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:13 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3696.0> 2011-01-03 13:05:14 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:15 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3705.0> 2011-01-03 13:05:15 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:16 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:05:18 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3722.0> 2011-01-03 13:05:19 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:20 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3733.0> 2011-01-03 13:05:21 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:21 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3738.0> 2011-01-03 13:05:22 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:22 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3742.0> 2011-01-03 13:05:24 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT 
<0.105.0> 2011-01-03 13:05:24 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3747.0> 2011-01-03 13:05:25 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:26 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:05:27 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:05:29 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3763.0> 2011-01-03 13:05:29 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:31 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3770.0> 2011-01-03 13:05:31 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:32 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3773.0> 2011-01-03 13:05:32 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:33 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3782.0> 2011-01-03 13:05:34 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:35 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3785.0> 2011-01-03 13:05:35 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:37 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:05:38 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:05:39 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3802.0> 2011-01-03 13:05:39 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:41 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3812.0> 2011-01-03 13:05:41 
=============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:42 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3815.0> 2011-01-03 13:05:42 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.3824.0> 2011-01-03 13:05:44 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:44 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3827.0> 2011-01-03 13:05:45 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:45 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:05:47 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:05:48 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3846.0> 2011-01-03 13:05:49 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.3848.0> 2011-01-03 13:05:49 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:50 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3852.0> 2011-01-03 13:05:51 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.259.0> 2011-01-03 13:05:51 =============================================================================== ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default": auth_cmds 514 auth_errors 0 bucket_conns 93 bytes_read 3228967497 bytes_written 48508525 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 1251493 conn_yields 120 connection_structures 235 curr_connections 115 curr_items 1250857 curr_items_tot 1970509 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 1390 ep_commit_time 0 ep_commit_time_total 467 ep_data_age 129 ep_data_age_highwat 193 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 98 ep_flush_duration_highwat 98 ep_flush_duration_total 390 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 297938 ep_io_num_read 0 ep_io_num_write 1290631 ep_io_read_bytes 0 ep_io_write_bytes 1825483927 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 2439568851 ep_max_data_size 3426746368 
ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 122791 ep_num_eject_failures 154055 ep_num_eject_replicas 256251 ep_num_expiry_pager_runs 0 ep_num_non_resident 379041 ep_num_not_my_vbuckets 0 ep_num_pager_runs 1 ep_num_value_ejects 379061 ep_oom_errors 0 ep_overhead 58163206 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 382436 ep_storage_age 126 ep_storage_age_highwat 190 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 0 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 2970254251 ep_total_del_items 0 ep_total_enqueued 1971090 ep_total_new_items 1290411 ep_total_persisted 1290631 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2497732057 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 513 tap_mutation_received 974438 tap_mutation_sent 1027683 tap_opaque_received 513 tap_opaque_sent 1025 tap_vbucket_set_sent 1024 threads 4 time 1294088751 total_connections 818 uptime 765 version 1.4.4_304_g7d5a132 INFO REPORT <0.105.0> 2011-01-03 13:05:52 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3858.0> 2011-01-03 13:05:52 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.3864.0> 2011-01-03 13:05:54 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:54 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:05:55 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3870.0> 2011-01-03 13:05:55 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:05:56 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:05:58 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:05:59 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3885.0> 2011-01-03 13:05:59 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:01 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 
secs INFO REPORT <0.85.0> 2011-01-03 13:06:01 =============================================================================== Pulling config from: 'ns_1@10.2.1.101' ERROR REPORT <0.3895.0> 2011-01-03 13:06:01 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:02 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3898.0> 2011-01-03 13:06:02 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:03 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3904.0> 2011-01-03 13:06:04 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:04 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3910.0> 2011-01-03 13:06:05 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:06 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:06:07 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.93.0> 2011-01-03 13:06:08 =============================================================================== ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,88767,412401}}, {active_buckets,["default"]}, {memory, [{total,22257800}, {processes,14694228}, {processes_used,14679124}, {system,7563572}, {atom,560301}, {atom_used,557531}, {binary,290264}, {code,4570913}, {ets,731364}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,779}, {memory_data,{4284698624,3495477248,{<0.3563.0>,1386364}}}, {disk_data, [{"C:\\",48162864,53},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,594141184}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{770402,0}}, {context_switches,{245483,0}}, {garbage_collection,{48051,343695248,0}}, {io,{{input,13104963},{output,9433864}}}, {reductions,{214989032,445791}}, {run_queue,0}, {runtime,{10888,31}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,88768,363400}}, {active_buckets,["default"]}, {memory, [{total,14039304}, {processes,6503468}, {processes_used,6495276}, {system,7535836}, {atom,559325}, {atom_used,555631}, {binary,347896}, {code,4543239}, {ets,690092}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,710}, 
{memory_data,{4284698624,3730006016,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,40},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,320471040}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{700902,0}}, {context_switches,{110091,0}}, {garbage_collection,{29766,206295403,0}}, {io,{{input,19788706},{output,7122992}}}, {reductions,{57204831,200396}}, {run_queue,0}, {runtime,{5912,0}}]}]}] INFO REPORT <0.105.0> 2011-01-03 13:06:09 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3928.0> 2011-01-03 13:06:09 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:10 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3937.0> 2011-01-03 13:06:11 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:12 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3940.0> 2011-01-03 13:06:12 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:13 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3949.0> 2011-01-03 13:06:14 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:15 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3952.0> 2011-01-03 13:06:15 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:16 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:06:17 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:06:19 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3970.0> 2011-01-03 13:06:19 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:21 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3977.0> 2011-01-03 13:06:21 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.3983.0> 2011-01-03 13:06:22 
=============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:22 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3989.0> 2011-01-03 13:06:24 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:24 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.3995.0> 2011-01-03 13:06:25 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:26 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:06:27 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:06:28 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4011.0> 2011-01-03 13:06:29 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:30 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4015.0> 2011-01-03 13:06:31 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:31 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4021.0> 2011-01-03 13:06:32 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.4027.0> 2011-01-03 13:06:34 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:34 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:06:35 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4033.0> 2011-01-03 13:06:35 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:36 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:06:38 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4048.0> 2011-01-03 13:06:39 
=============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:40 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4058.0> 2011-01-03 13:06:41 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:41 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4063.0> 2011-01-03 13:06:42 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:42 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4072.0> 2011-01-03 13:06:44 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:44 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4075.0> 2011-01-03 13:06:45 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:47 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:06:48 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4091.0> 2011-01-03 13:06:49 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:49 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.85.0> 2011-01-03 13:06:49 =============================================================================== Pulling config from: 'ns_1@10.2.1.101' ERROR REPORT <0.4096.0> 2011-01-03 13:06:49 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:50 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4100.0> 2011-01-03 13:06:51 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:52 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4106.0> 2011-01-03 13:06:52 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:53 
=============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4112.0> 2011-01-03 13:06:54 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:54 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4115.0> 2011-01-03 13:06:55 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:06:57 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:06:58 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:06:59 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4133.0> 2011-01-03 13:06:59 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:00 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4141.0> 2011-01-03 13:07:01 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:01 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4147.0> 2011-01-03 13:07:02 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:03 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4153.0> 2011-01-03 13:07:04 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:04 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4159.0> 2011-01-03 13:07:05 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:06 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:07:08 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.93.0> 2011-01-03 13:07:08 =============================================================================== ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,88827,410401}}, {active_buckets,["default"]}, {memory, [{total,21789776}, 
{processes,14219636}, {processes_used,14204532}, {system,7570140}, {atom,560301}, {atom_used,557531}, {binary,263568}, {code,4570913}, {ets,765532}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,839}, {memory_data,{4284698624,3747790848,{<0.2792.0>,971680}}}, {disk_data, [{"C:\\",48162864,53},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,340926464}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{830400,0}}, {context_switches,{266984,0}}, {garbage_collection,{51375,385795063,0}}, {io,{{input,14675778},{output,9891567}}}, {reductions,{238775956,444441}}, {run_queue,0}, {runtime,{11793,15}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,88828,377401}}, {active_buckets,["default"]}, {memory, [{total,14526408}, {processes,6939236}, {processes_used,6931044}, {system,7587172}, {atom,559325}, {atom_used,555631}, {binary,365136}, {code,4543239}, {ets,724156}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,770}, {memory_data,{4284698624,4028600320,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,40},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,79532032}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{760900,0}}, {context_switches,{118695,0}}, {garbage_collection,{32390,227615860,0}}, {io,{{input,21128030},{output,7521851}}}, {reductions,{62907205,196360}}, {run_queue,0}, {runtime,{6162,0}}]}]}] ERROR REPORT <0.4177.0> 2011-01-03 13:07:09 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:09 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.85.0> 2011-01-03 13:07:11 =============================================================================== Pulling config from: 'ns_1@10.2.1.101' ERROR REPORT <0.4184.0> 2011-01-03 13:07:11 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:12 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4187.0> 2011-01-03 13:07:12 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:13 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4198.0> 2011-01-03 13:07:14 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:14 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR 
REPORT <0.4201.0> 2011-01-03 13:07:15 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:15 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:07:17 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:07:19 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4216.0> 2011-01-03 13:07:19 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:20 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4227.0> 2011-01-03 13:07:21 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** ERROR REPORT <0.4230.0> 2011-01-03 13:07:22 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:22 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4239.0> 2011-01-03 13:07:24 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:24 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4242.0> 2011-01-03 13:07:25 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:25 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:07:27 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:07:28 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4261.0> 2011-01-03 13:07:29 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:30 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4265.0> 2011-01-03 13:07:31 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.259.0> 2011-01-03 13:07:31 =============================================================================== ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default": auth_cmds 514 auth_errors 0 bucket_conns 93 bytes_read 
4015681337 bytes_written 65226927 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 1659430 conn_yields 123 connection_structures 235 curr_connections 115 curr_items 1658254 curr_items_tot 2377906 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 1683 ep_commit_time 1 ep_commit_time_total 555 ep_data_age 201 ep_data_age_highwat 293 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 98 ep_flush_duration_highwat 98 ep_flush_duration_total 390 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 6938 ep_io_num_read 0 ep_io_num_write 1581629 ep_io_read_bytes 0 ep_io_write_bytes 2237438266 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 2389148366 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 368770 ep_num_eject_failures 698164 ep_num_eject_replicas 485627 ep_num_expiry_pager_runs 0 ep_num_non_resident 854396 ep_num_not_my_vbuckets 0 ep_num_pager_runs 3 ep_num_value_ejects 854490 ep_oom_errors 0 ep_overhead 63886911 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 790108 ep_storage_age 198 ep_storage_age_highwat 290 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 0 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 3585434366 ep_total_del_items 0 ep_total_enqueued 2378762 ep_total_new_items 1581329 ep_total_persisted 1581629 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2453035277 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 513 tap_mutation_received 1109038 tap_mutation_sent 1199693 tap_opaque_received 513 tap_opaque_sent 1025 tap_vbucket_set_sent 1024 threads 4 time 1294088851 total_connections 818 uptime 865 version 1.4.4_304_g7d5a132 ERROR REPORT <0.4271.0> 2011-01-03 13:07:32 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:32 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4277.0> 2011-01-03 13:07:34 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:34 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs ERROR REPORT <0.4283.0> 2011-01-03 13:07:35 =============================================================================== ** Connection attempt from disallowed node 'ns_1@10.2.1.102' ** INFO REPORT <0.105.0> 2011-01-03 13:07:36 =============================================================================== memcached<0.105.0>: 
Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:07:37
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.85.0> 2011-01-03 13:07:38
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:07:38
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4298.0> 2011-01-03 13:07:39
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:07:40
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.3441.0> 2011-01-03 13:07:40
menelaus_web streaming socket closed by client
ERROR REPORT <0.4308.0> 2011-01-03 13:07:41
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:07:41
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.3432.0> 2011-01-03 13:07:42
menelaus_web streaming socket closed by client
ERROR REPORT <0.4311.0> 2011-01-03 13:07:42
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:07:42
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4319.0> 2011-01-03 13:07:44
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.3420.0> 2011-01-03 13:07:44
menelaus_web streaming socket closed by client
INFO REPORT <0.105.0> 2011-01-03 13:07:44
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4325.0> 2011-01-03 13:07:45
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.3411.0> 2011-01-03 13:07:46
menelaus_web streaming socket closed by client
INFO REPORT <0.105.0> 2011-01-03 13:07:46
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.85.0> 2011-01-03 13:07:47
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <0.3356.0> 2011-01-03 13:07:47
menelaus_web streaming socket closed by client
INFO REPORT <0.105.0> 2011-01-03 13:07:48
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.3294.0> 2011-01-03 13:07:49
menelaus_web streaming socket closed by client
ERROR REPORT <0.4341.0> 2011-01-03 13:07:49
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
ERROR REPORT <0.4343.0> 2011-01-03 13:07:49
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.3256.0> 2011-01-03 13:07:50
menelaus_web streaming socket closed by client
INFO REPORT <0.105.0> 2011-01-03 13:07:50
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4350.0> 2011-01-03 13:07:51
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.2792.0> 2011-01-03 13:07:51
menelaus_web streaming socket closed by client
ERROR REPORT <0.4353.0> 2011-01-03 13:07:52
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:07:52
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.1924.0> 2011-01-03 13:07:53
menelaus_web streaming socket closed by client
ERROR REPORT <0.4362.0> 2011-01-03 13:07:54
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:07:54
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4365.0> 2011-01-03 13:07:55
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:07:56
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:07:57
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:07:59
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4383.0> 2011-01-03 13:07:59
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:00
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4390.0> 2011-01-03 13:08:01
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:02
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4396.0> 2011-01-03 13:08:02
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:03
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4402.0> 2011-01-03 13:08:04
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:05
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4408.0> 2011-01-03 13:08:05
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:07
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.93.0> 2011-01-03 13:08:08
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100', [{last_heard,{1294,88887,408401}}, {active_buckets,["default"]}, {memory, [{total,14500096}, {processes,6952124}, {processes_used,6931476}, {system,7547972}, {atom,560301}, {atom_used,557531}, {binary,247536}, {code,4570913}, {ets,763940}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,899}, {memory_data,{4284698624,4002705408,{<0.2792.0>,971680}}}, {disk_data, [{"C:\\",48162864,54},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,155570176}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{890398,0}}, {context_switches,{288312,0}}, {garbage_collection,{54912,427121605,0}}, {io,{{input,16280663},{output,10344292}}}, {reductions,{262433088,289476}}, {run_queue,0}, {runtime,{12854,0}}]}]},
 {'ns_1@10.2.1.101', [{last_heard,{1294,88888,375402}}, {active_buckets,["default"]}, {memory, [{total,14695904}, {processes,7073156}, {processes_used,7064964}, {system,7622748}, {atom,559325}, {atom_used,555631}, {binary,364904}, {code,4543239}, {ets,760172}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,830}, {memory_data,{4284698624,4237778944,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,41},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,55386112}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{820898,0}}, {context_switches,{127582,0}}, {garbage_collection,{35094,249149750,0}}, {io,{{input,23206793},{output,8358262}}}, {reductions,{68673166,191438}}, {run_queue,0}, {runtime,{6458,0}}]}]}]
INFO REPORT <0.105.0> 2011-01-03 13:08:08
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4441.0> 2011-01-03 13:08:09
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:10
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4445.0> 2011-01-03 13:08:11
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:11
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4451.0> 2011-01-03 13:08:12
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:13
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4457.0> 2011-01-03 13:08:14
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:14
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4465.0> 2011-01-03 13:08:15
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:16
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:08:17
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4480.0> 2011-01-03 13:08:19
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:19
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4490.0> 2011-01-03 13:08:21
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:21
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4493.0> 2011-01-03 13:08:22
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:23
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4502.0> 2011-01-03 13:08:24
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.85.0> 2011-01-03 13:08:25
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:08:25
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4505.0> 2011-01-03 13:08:25
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:27
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:08:29
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4524.0> 2011-01-03 13:08:29
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:30
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4528.0> 2011-01-03 13:08:31
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:31
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4534.0> 2011-01-03 13:08:32
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:33
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4540.0> 2011-01-03 13:08:34
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:35
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4543.0> 2011-01-03 13:08:35
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:36
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:08:38
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4561.0> 2011-01-03 13:08:39
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:40
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4565.0> 2011-01-03 13:08:41
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:41
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4574.0> 2011-01-03 13:08:42
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:43
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4580.0> 2011-01-03 13:08:44
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:45
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4586.0> 2011-01-03 13:08:45
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:46
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:08:47
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4604.0> 2011-01-03 13:08:49
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:49
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4606.0> 2011-01-03 13:08:49
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:50
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4613.0> 2011-01-03 13:08:51
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
ERROR REPORT <0.4616.0> 2011-01-03 13:08:52
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:53
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4625.0> 2011-01-03 13:08:54
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:54
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4628.0> 2011-01-03 13:08:55
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:08:55
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:08:57
memcached<0.105.0>: Suspend eq_tapq:anon_513 for
1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:08:59
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4646.0> 2011-01-03 13:08:59
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:09:00
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4650.0> 2011-01-03 13:09:01
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:09:02
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4656.0> 2011-01-03 13:09:02
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
ERROR REPORT <0.4665.0> 2011-01-03 13:09:04
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:09:04
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.85.0> 2011-01-03 13:09:05
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:09:05
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4668.0> 2011-01-03 13:09:05
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:09:07
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.93.0> 2011-01-03 13:09:08
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100', [{last_heard,{1294,88947,406401}}, {active_buckets,["default"]}, {memory, [{total,14422960}, {processes,6896940}, {processes_used,6876292}, {system,7526020}, {atom,560301}, {atom_used,557531}, {binary,191232}, {code,4570913}, {ets,799764}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,959}, {memory_data,{4284698624,4128333824,{<0.4189.0>,786900}}}, {disk_data, [{"C:\\",48162864,54},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,150798336}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{950396,0}}, {context_switches,{307908,0}}, {garbage_collection,{58635,461830637,0}}, {io,{{input,18614451},{output,11538987}}}, {reductions,{284430798,291951}}, {run_queue,0}, {runtime,{13852,0}}]}]},
 {'ns_1@10.2.1.101', [{last_heard,{1294,88948,373402}}, {active_buckets,["default"]}, {memory, [{total,14699544}, {processes,7042572}, {processes_used,7034380},
{system,7656972}, {atom,559325}, {atom_used,555631}, {binary,365240}, {code,4543239}, {ets,794060}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,890}, {memory_data,{4284698624,4214956032,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,41},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,62357504}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{880896,0}}, {context_switches,{136099,0}}, {garbage_collection,{38085,270653643,0}}, {io,{{input,24535940},{output,8778566}}}, {reductions,{74441734,197835}}, {run_queue,0}, {runtime,{6786,47}}]}]}]
INFO REPORT <0.105.0> 2011-01-03 13:09:08
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4689.0> 2011-01-03 13:09:09
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:09:10
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4693.0> 2011-01-03 13:09:11
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:09:11
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.259.0> 2011-01-03 13:09:11
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 514 auth_errors 0 bucket_conns 3 bytes_read 4229220526 bytes_written 71536265 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 1725729 conn_yields 126 connection_structures 235 curr_connections 25 curr_items 1724458 curr_items_tot 2444110 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 1943 ep_commit_time 0 ep_commit_time_total 640 ep_data_age 228 ep_data_age_highwat 295 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 192 ep_flush_duration_highwat 192 ep_flush_duration_total 582 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 554467 ep_io_num_read 0 ep_io_num_write 1840566 ep_io_read_bytes 0 ep_io_write_bytes 2604111927 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 2474188018 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 368744 ep_num_eject_failures 698164 ep_num_eject_replicas 496278 ep_num_expiry_pager_runs 0 ep_num_non_resident 865021 ep_num_not_my_vbuckets 0 ep_num_pager_runs 3 ep_num_value_ejects 865141 ep_oom_errors 0 ep_overhead 54587198 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 49901 ep_storage_age 225 ep_storage_age_highwat 292 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9
ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 0 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 3685385418 ep_total_del_items 0 ep_total_enqueued 2445022 ep_total_new_items 1840087 ep_total_persisted 1840566 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2528775216 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 513 tap_mutation_received 1189758 tap_mutation_sent 1429153 tap_opaque_received 513 tap_opaque_sent 1025 tap_vbucket_set_sent 1024 threads 4 time 1294088950 total_connections 818 uptime 964 version 1.4.4_304_g7d5a132
ERROR REPORT <0.4699.0> 2011-01-03 13:09:12
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:09:12
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4705.0> 2011-01-03 13:09:14
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:09:14
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4711.0> 2011-01-03 13:09:15
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:09:16
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:17
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:19
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4728.0> 2011-01-03 13:09:19
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:09:20
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4735.0> 2011-01-03 13:09:21
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:09:21
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4741.0> 2011-01-03 13:09:22
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:09:23
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
ERROR REPORT <0.4750.0> 2011-01-03 13:09:24
** Connection attempt from disallowed node 'ns_1@10.2.1.102' **
INFO REPORT <0.105.0> 2011-01-03 13:09:25
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.85.0> 2011-01-03 13:09:25
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:09:26
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:27
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:29
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.85.0> 2011-01-03 13:09:30
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:09:30
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:32
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:34
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:36
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:38
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:40
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:42
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:43
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:44
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:46
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:47
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:49
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:51
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:53
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:54
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:55
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:58
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:09:59
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:01
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:03
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:04
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:06
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.93.0> 2011-01-03 13:10:08
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100', [{last_heard,{1294,89007,404401}}, {active_buckets,["default"]}, {memory, [{total,13845392}, {processes,6327812}, {processes_used,6307164}, {system,7517580}, {atom,560301}, {atom_used,557531}, {binary,184376}, {code,4570913}, {ets,798212}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1019}, {memory_data,{4284698624,4134535168,{<0.228.0>,786208}}}, {disk_data, [{"C:\\",48162864,55},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,145440768}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1010393,0}}, {context_switches,{326966,0}}, {garbage_collection,{62156,496415334,0}}, {io,{{input,20207629},{output,11994425}}}, {reductions,{306497800,3510475}}, {run_queue,0}, {runtime,{14866,140}}]}]},
 {'ns_1@10.2.1.101', [{last_heard,{1294,89008,371402}}, {active_buckets,["default"]}, {memory, [{total,14897928}, {processes,7230444}, {processes_used,7222252}, {system,7667484}, {atom,559325}, {atom_used,555631}, {binary,375280}, {code,4543239}, {ets,793028}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,950}, {memory_data,{4284698624,4224675840,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,42},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624},
{free_memory,45428736}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{940894,0}}, {context_switches,{144392,0}}, {garbage_collection,{41344,292214910,0}}, {io,{{input,25838183},{output,9177007}}}, {reductions,{80235875,1032102}}, {run_queue,0}, {runtime,{7534,124}}]}]}]
INFO REPORT <0.105.0> 2011-01-03 13:10:08
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:11
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:12
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:13
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:15
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:18
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:19
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.85.0> 2011-01-03 13:10:20
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:10:21
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:23
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:24
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:25
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:27
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:29
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:31
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:33
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:34
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:37
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:38
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:40
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:41
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:42
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:44
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:45
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.85.0> 2011-01-03 13:10:48
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:10:48
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:50
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:51
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.259.0> 2011-01-03 13:10:51
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 514 auth_errors 0 bucket_conns 3 bytes_read 4619164813 bytes_written 76782942 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 1725729 conn_yields 164 connection_structures 235 curr_connections 25 curr_items 1724458 curr_items_tot 2618395 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 2224 ep_commit_time 0 ep_commit_time_total 737 ep_data_age 205 ep_data_age_highwat 393 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 192 ep_flush_duration_highwat 192 ep_flush_duration_total 582 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 274467 ep_io_num_read 0 ep_io_num_write 2120565 ep_io_read_bytes 0 ep_io_write_bytes 3000616467 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 2417638517 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 549619 ep_num_eject_failures 972642 ep_num_eject_replicas 543823 ep_num_expiry_pager_runs 0 ep_num_non_resident 1093361 ep_num_not_my_vbuckets 0 ep_num_pager_runs 4 ep_num_value_ejects 1093561 ep_oom_errors 0 ep_overhead 49505808 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 224284 ep_storage_age 205 ep_storage_age_highwat 390 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9
ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 0 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 3948623917 ep_total_del_items 0 ep_total_enqueued 2619405 ep_total_new_items 2119916 ep_total_persisted 2120565 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2467144325 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 513 tap_mutation_received 1457558 tap_mutation_sent 1769249 tap_opaque_received 513 tap_opaque_sent 1025 tap_vbucket_set_sent 1024 threads 4 time 1294089051 total_connections 818 uptime 1065 version 1.4.4_304_g7d5a132
INFO REPORT <0.105.0> 2011-01-03 13:10:53
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.85.0> 2011-01-03 13:10:54
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:10:54
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:56
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:57
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:10:59
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:00
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:01
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:03
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:05
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:06
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:07
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.93.0> 2011-01-03 13:11:08
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100', [{last_heard,{1294,89067,402401}}, {active_buckets,["default"]}, {memory, [{total,14357576}, {processes,6809612}, {processes_used,6788964}, {system,7547964}, {atom,560301}, {atom_used,557531}, {binary,179448}, {code,4570913}, {ets,833724}]},
{cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1079}, {memory_data,{4284698624,4144566272,{<0.228.0>,786208}}}, {disk_data, [{"C:\\",48162864,55},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,137891840}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1070391,0}}, {context_switches,{345783,0}}, {garbage_collection,{65472,531152769,0}}, {io,{{input,21825817},{output,12458843}}}, {reductions,{328623676,3967918}}, {run_queue,0}, {runtime,{15818,172}}]}]},
 {'ns_1@10.2.1.101', [{last_heard,{1294,89068,369402}}, {active_buckets,["default"]}, {memory, [{total,14964656}, {processes,7253772}, {processes_used,7245580}, {system,7710884}, {atom,559325}, {atom_used,555631}, {binary,381760}, {code,4543239}, {ets,828684}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1010}, {memory_data,{4284698624,4170047488,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,42},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,511148032}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1000892,0}}, {context_switches,{152845,0}}, {garbage_collection,{44026,313869011,0}}, {io,{{input,27900010},{output,10322125}}}, {reductions,{86071368,1038797}}, {run_queue,0}, {runtime,{8236,171}}]}]}]
INFO REPORT <0.105.0> 2011-01-03 13:11:09
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:10
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:12
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:14
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:16
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:17
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:19
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:21
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:22
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:24
memcached<0.105.0>: Suspend
eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:25
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:26
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:11:29
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs
INFO REPORT <0.63.0> 2011-01-03 13:11:29
ns_1@10.2.1.100:ns_cluster:90: handling add_node("10.2.1.102", 8091, ..)
INFO REPORT <0.63.0> 2011-01-03 13:11:29
ns_1@10.2.1.100:ns_cluster:300: Posting node info to engage_cluster on {"10.2.1.102", 8091}: {struct, [{availableStorage, {struct, [{hdd, [{struct, [{path,<<"C:\\">>}, {sizeKBytes,48162864}, {usagePercent,56}]}, {struct, [{path,<<"D:\\">>}, {sizeKBytes,51279476}, {usagePercent,0}]}, {struct, [{path,<<"G:\\">>}, {sizeKBytes,34724465}, {usagePercent,17}]}]}]}}, {memoryQuota,3268}, {storageTotals, {struct, [{ram, {struct, [{usedByData,2282313229}, {total,4284698624}, {quotaTotal,3426746368}, {used,4147077120}]}}, {hdd, {struct, [{usedByData,3420593152}, {total,49318772736}, {quotaTotal,49318772736}, {used,27618512732}, {free,21700260004}]}}]}}, {storage, {struct, [{ssd,[]}, {hdd, [{struct, [{path,<<"c:/Program Files/Membase/Server/data/ns_1">>}, {quotaMb,none}, {state,ok}]}]}]}}, {uptime,<<"1100">>}, {memoryTotal,4284698624}, {memoryFree,137621504}, {mcdMemoryReserved,3268}, {mcdMemoryAllocated,3268}, {otpNode,<<"ns_1@10.2.1.100">>}, {otpCookie,<<"pmqchiglstnppkwf">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {hostname,<<"10.2.1.100:8091">>}, {clusterCompatibility,1}, {version,<<"1.6.5r">>}, {os,<<"windows">>}, {ports,{struct,[{proxy,11211},{direct,11210}]}}]}
INFO REPORT <0.63.0> 2011-01-03 13:11:29
ns_1@10.2.1.100:ns_cluster:306: Reply from engage_cluster on {"10.2.1.102", 8091}: {ok,{struct,[{<<"availableStorage">>, {struct,[{<<"hdd">>, [{struct,[{<<"path">>,<<"C:\\">>}, {<<"sizeKBytes">>,49423972}, {<<"usagePercent">>,36}]}, {struct,[{<<"path">>,<<"D:\\">>}, {<<"sizeKBytes">>,52797620}, {<<"usagePercent">>,0}]}, {struct,[{<<"path">>,<<"G:\\">>}, {<<"sizeKBytes">>,34724465}, {<<"usagePercent">>,17}]}]}]}}, {<<"memoryQuota">>,3268}, {<<"storageTotals">>, {struct,[{<<"ram">>, {struct,[{<<"usedByData">>,0}, {<<"total">>,4284698624.0}, {<<"quotaTotal">>,3426746368.0}, {<<"used">>,578011136}]}}, {<<"hdd">>, {struct,[{<<"usedByData">>,0}, {<<"total">>,50610147328.0}, {<<"quotaTotal">>,50610147328.0}, {<<"used">>,18219653038.0}, {<<"free">>,32390494290.0}]}}]}}, {<<"storage">>, {struct,[{<<"ssd">>,[]}, {<<"hdd">>, [{struct,[{<<"path">>, <<"c:/Program Files/Membase/Server/data/ns_1">>}, {<<"quotaMb">>,<<"none">>}, {<<"state">>,<<"ok">>}]}]}]}}, {<<"uptime">>,<<"18">>}, {<<"memoryTotal">>,4284698624.0}, {<<"memoryFree">>,3706687488.0}, {<<"mcdMemoryReserved">>,3268}, {<<"mcdMemoryAllocated">>,3268}, {<<"otpNode">>,<<"ns_1@10.2.1.102">>}, {<<"otpCookie">>,<<"jstohszpzysoucmg">>}, {<<"clusterMembership">>,<<"active">>}, {<<"status">>,<<"healthy">>}, {<<"hostname">>,<<"10.2.1.102:8091">>}, {<<"clusterCompatibility">>,1},
{<<"version">>,<<"1.6.5r">>}, {<<"os">>,<<"windows">>}, {<<"ports">>, {struct,[{<<"proxy">>,11211},{<<"direct">>,11210}]}}]}} INFO REPORT <0.63.0> 2011-01-03 13:11:29 =============================================================================== ns_1@10.2.1.100:ns_cluster:371: port_please("ns_1", "10.2.1.102") = 21100 INFO REPORT <0.63.0> 2011-01-03 13:11:29 =============================================================================== ns_1@10.2.1.100:ns_cluster:461: Started node add transaction by adding node 'ns_1@10.2.1.102' to nodes_wanted INFO REPORT <0.65.0> 2011-01-03 13:11:29 =============================================================================== ns_node_disco_conf_events config on nodes_wanted INFO REPORT <0.65.0> 2011-01-03 13:11:29 =============================================================================== config change: nodes_wanted -> ['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'] INFO REPORT <0.5149.0> 2011-01-03 13:11:29 =============================================================================== ns_node_disco cookie_sync INFO REPORT <0.5149.0> 2011-01-03 13:11:29 =============================================================================== ns_node_disco: nodes_wanted updated: ['ns_1@10.2.1.100','ns_1@10.2.1.101', 'ns_1@10.2.1.102'], with cookie: pmqchiglstnppkwf INFO REPORT <0.65.0> 2011-01-03 13:11:29 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 13:11:29 =============================================================================== Pushing config INFO REPORT <0.63.0> 2011-01-03 13:11:29 =============================================================================== ns_1@10.2.1.100:ns_cluster:431: Posting the following to complete_join on "10.2.1.102:8091": {struct, [{<<"targetNode">>,'ns_1@10.2.1.102'}, {availableStorage, {struct, [{hdd, [{struct, [{path,<<"C:\\">>}, {sizeKBytes,48162864}, {usagePercent,56}]}, {struct, [{path,<<"D:\\">>}, {sizeKBytes,51279476}, {usagePercent,0}]}, {struct, [{path,<<"G:\\">>}, {sizeKBytes,34724465}, {usagePercent,17}]}]}]}}, {memoryQuota,3268}, {storageTotals, {struct, [{ram, {struct, [{usedByData,2282313229}, {total,4284698624}, {quotaTotal,3426746368}, {used,4147077120}]}}, {hdd, {struct, [{usedByData,3420593152}, {total,49318772736}, {quotaTotal,49318772736}, {used,27618512732}, {free,21700260004}]}}]}}, {storage, {struct, [{ssd,[]}, {hdd, [{struct, [{path,<<"c:/Program Files/Membase/Server/data/ns_1">>}, {quotaMb,none}, {state,ok}]}]}]}}, {uptime,<<"1100">>}, {memoryTotal,4284698624}, {memoryFree,137621504}, {mcdMemoryReserved,3268}, {mcdMemoryAllocated,3268}, {otpNode,<<"ns_1@10.2.1.100">>}, {otpCookie,<<"pmqchiglstnppkwf">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {hostname,<<"10.2.1.100:8091">>}, {clusterCompatibility,1}, {version,<<"1.6.5r">>}, {os,<<"windows">>}, {ports,{struct,[{proxy,11211},{direct,11210}]}}]} INFO REPORT <0.5149.0> 2011-01-03 13:11:29 =============================================================================== ns_node_disco: nodes_wanted pong: ['ns_1@10.2.1.100','ns_1@10.2.1.101'], with cookie: pmqchiglstnppkwf INFO REPORT <0.85.0> 2011-01-03 13:11:29 =============================================================================== Pushing config done INFO REPORT <0.65.0> 2011-01-03 13:11:29 =============================================================================== config change: {node,'ns_1@10.2.1.102',membership} -> inactiveAdded INFO REPORT <0.65.0> 
INFO REPORT <0.65.0> 2011-01-03 13:11:29
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:11:29
Pushing config

INFO REPORT <0.85.0> 2011-01-03 13:11:29
Pushing config done

INFO REPORT <0.79.0> 2011-01-03 13:11:29
ns_log: logging ns_node_disco:4:Node 'ns_1@10.2.1.100' saw that node 'ns_1@10.2.1.102' came up.

INFO REPORT <0.65.0> 2011-01-03 13:11:30
config change: {node,'ns_1@10.2.1.102',memcached} -> [{port,11210}, {dbdir,"c:/Program Files/Membase/Server/data/ns_1"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"./bin/bucket_engine/bucket_engine.so"}, {engines,[{membase,[{engine,"bin/ep_engine/ep.so"}, {initfile,"priv/init.sql"}]}, {memcached,[{engine,"bin/memcached/default_engine.so"}]}]}, {verbosity,[]}]

INFO REPORT <0.65.0> 2011-01-03 13:11:30
config change: {node,'ns_1@10.2.1.102',ns_log} -> [{filename,"c:/Program Files/Membase/Server/data/ns_1/ns_log"}]

INFO REPORT <0.65.0> 2011-01-03 13:11:30
config change: {node,'ns_1@10.2.1.102',isasl} -> [{path,"c:/Program Files/Membase/Server/data/ns_1/isasl.pw"}]

INFO REPORT <0.65.0> 2011-01-03 13:11:30
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:11:30
Pushing config

INFO REPORT <0.85.0> 2011-01-03 13:11:30
Pushing config done

INFO REPORT <0.105.0> 2011-01-03 13:11:30
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.63.0> 2011-01-03 13:11:30
ns_1@10.2.1.100:ns_cluster:437: Reply from complete_join on "10.2.1.102:8091": {ok,[]}

INFO REPORT <0.63.0> 2011-01-03 13:11:30
ns_1@10.2.1.100:ns_cluster:92: add_node("10.2.1.102", 8091, ..) -> {ok, 'ns_1@10.2.1.102'}
INFO REPORT <0.105.0> 2011-01-03 13:11:31
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.110.0> 2011-01-03 13:11:32
ns_log: logging ns_orchestrator:4:Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101', 'ns_1@10.2.1.102'], EjectNodes = []

INFO REPORT <0.65.0> 2011-01-03 13:11:32
config change: {node,'ns_1@10.2.1.102',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:11:32
config change: {node,'ns_1@10.2.1.101',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:11:32
config change: {node,'ns_1@10.2.1.100',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:11:32
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:11:32
Pushing config

INFO REPORT <0.85.0> 2011-01-03 13:11:32
Pushing config done

INFO REPORT <0.65.0> 2011-01-03 13:11:32
config change: rebalance_status -> running

INFO REPORT <0.65.0> 2011-01-03 13:11:32
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:11:32
Pushing config

INFO REPORT <0.85.0> 2011-01-03 13:11:32
Pushing config done

INFO REPORT <0.65.0> 2011-01-03 13:11:32
config change: buckets -> [{configs,[{"default", [{num_replicas,1}, {ram_quota,3426746368}, {auth_type,sasl}, {sasl_password,[]}, {type,membase}, {num_vbuckets,1024}, {ht_size,3079}, {tap_keepalive,0}, {tap_noop_interval,20}, {max_txn_size,1000}, {ht_locks,5}, {servers,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']}, {map,[['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'],
['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101'|...], [...]|...]}]}]}]

INFO REPORT <0.65.0> 2011-01-03 13:11:32
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:11:32
Pushing config

INFO REPORT <0.85.0> 2011-01-03 13:11:32
Pushing config done

INFO REPORT <0.105.0> 2011-01-03 13:11:33
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5183.0> 2011-01-03 13:11:33
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:11:34
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5183.0> 2011-01-03 13:11:34
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.78.0> 2011-01-03 13:11:34
Detected a new node (from node 'ns_1@10.2.1.100'). Moving config around.
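The repeated "ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']" lines are the rebalancer polling roughly once a second until memcached on the new node answers; when the attempts run out it aborts, which is what produces the wait_for_memcached_failed exit below. A sketch of that kind of bounded poll loop (illustrative only: wait_for_memcached_ready/2 and node_ready/1 are hypothetical names, and the one-second interval is read off the log timestamps, not taken from the source):

  %% Illustrative bounded poll; names and limits are assumptions.
  wait_for_memcached_ready(_Nodes, 0) ->
      exit(wait_for_memcached_failed);
  wait_for_memcached_ready(Nodes, AttemptsLeft) ->
      case [N || N <- Nodes, not node_ready(N)] of
          [] ->
              ok;  %% every node's memcached answered
          Waiting ->
              error_logger:info_msg("Waiting for ~p~n", [Waiting]),
              timer:sleep(1000),  %% ~1s between probes, as in the log
              wait_for_memcached_ready(Nodes, AttemptsLeft - 1)
      end.

  %% Stub: in ns_server this would actually probe the node's memcached port.
  node_ready(_Node) -> false.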
INFO REPORT <0.78.0> 2011-01-03 13:11:34
ns_node_disco_log: nodes changed: ['ns_1@10.2.1.100','ns_1@10.2.1.101', 'ns_1@10.2.1.102']

INFO REPORT <0.85.0> 2011-01-03 13:11:34
Pulling config

INFO REPORT <0.85.0> 2011-01-03 13:11:34
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.85.0> 2011-01-03 13:11:34
Pulling config done

INFO REPORT <0.5183.0> 2011-01-03 13:11:35
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:11:36
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5183.0> 2011-01-03 13:11:36
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:11:37
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5183.0> 2011-01-03 13:11:37
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.110.0> 2011-01-03 13:11:38
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5183.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.0]], [['ns_1@10.2.1.101'| 0.0]], [['ns_1@10.2.1.102'| 0.0]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.5183.0> 2011-01-03 13:11:38
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.110.0> 2011-01-03 13:11:38
ns_log: logging ns_orchestrator:2:Rebalance exited with reason wait_for_memcached_failed

INFO REPORT <0.65.0> 2011-01-03 13:11:38
config change: rebalance_status -> {none,<<"Rebalance failed. See logs for detailed reason. You can try rebalance again.">>}
You can try rebalance again.">>} INFO REPORT <0.65.0> 2011-01-03 13:11:38 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 13:11:38 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 13:11:38 =============================================================================== Pushing config done INFO REPORT <0.105.0> 2011-01-03 13:11:39 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:11:41 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:11:42 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:11:43 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:11:46 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:11:48 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:11:49 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.5272.0> 2011-01-03 13:11:49 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102'] INFO REPORT <0.5272.0> 2011-01-03 13:11:50 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102'] INFO REPORT <0.105.0> 2011-01-03 13:11:50 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.85.0> 2011-01-03 13:11:50 =============================================================================== Pulling config from: 'ns_1@10.2.1.102' INFO REPORT <0.5272.0> 2011-01-03 13:11:51 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102'] INFO REPORT <0.105.0> 2011-01-03 13:11:52 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.5272.0> 2011-01-03 13:11:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102'] INFO REPORT <0.5272.0> 2011-01-03 13:11:53 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102'] ERROR REPORT <0.5272.0> 2011-01-03 13:11:53 =============================================================================== ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.102'] INFO REPORT <0.105.0> 2011-01-03 13:11:53 
INFO REPORT <0.105.0> 2011-01-03 13:11:55
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.65.0> 2011-01-03 13:11:55
config change: {node,'ns_1@10.2.1.102',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:11:55
config change: {node,'ns_1@10.2.1.101',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:11:55
config change: {node,'ns_1@10.2.1.100',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:11:55
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:11:55
Pushing config

INFO REPORT <0.110.0> 2011-01-03 13:11:55
ns_log: logging ns_orchestrator:4:Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101', 'ns_1@10.2.1.102'], EjectNodes = []

INFO REPORT <0.72.0> 2011-01-03 13:11:55
ns_log: suppressing duplicate log ns_orchestrator:4("Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101',\n 'ns_1@10.2.1.102'], EjectNodes = []\n") because it's been seen 1 times in the past 23.338 secs (last seen 23.338 secs ago

INFO REPORT <0.85.0> 2011-01-03 13:11:55
Pushing config done

INFO REPORT <0.65.0> 2011-01-03 13:11:55
config change: rebalance_status -> running

INFO REPORT <0.65.0> 2011-01-03 13:11:55
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:11:55
Pushing config

INFO REPORT <0.85.0> 2011-01-03 13:11:55
Pushing config done

INFO REPORT <0.105.0> 2011-01-03 13:11:56
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5293.0> 2011-01-03 13:11:56
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:11:57
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5293.0> 2011-01-03 13:11:57
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.110.0> 2011-01-03 13:11:58
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5293.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.0]], [['ns_1@10.2.1.101'| 0.0]], [['ns_1@10.2.1.102'| 0.0]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}
INFO REPORT <0.5293.0> 2011-01-03 13:11:58
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:11:59
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5293.0> 2011-01-03 13:11:59
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:00
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5293.0> 2011-01-03 13:12:01
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:01
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5293.0> 2011-01-03 13:12:02
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.110.0> 2011-01-03 13:12:02
ns_log: logging ns_orchestrator:2:Rebalance exited with reason wait_for_memcached_failed

INFO REPORT <0.72.0> 2011-01-03 13:12:02
ns_log: suppressing duplicate log ns_orchestrator:2("Rebalance exited with reason wait_for_memcached_failed\n") because it's been seen 1 times in the past 23.322 secs (last seen 23.322 secs ago

INFO REPORT <0.65.0> 2011-01-03 13:12:02
config change: rebalance_status -> {none,<<"Rebalance failed. See logs for detailed reason. You can try rebalance again.">>}
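Note how ns_log folds the repeats: the second failure is logged once as ns_orchestrator:2 and then only counted ("seen 1 times in the past 23.322 secs"). A sketch of that dedup bookkeeping, keyed by the message (illustrative; maybe_log/3 is a hypothetical name and the real ns_log state is not shown in this log):

  %% Illustrative duplicate suppression; Now is seconds as a float.
  maybe_log(Key, Now, Seen) ->
      case maps:find(Key, Seen) of
          {ok, {Count, First, Last}} ->
              %% Seen before: report the count and age instead of re-logging.
              io:format("ns_log: suppressing duplicate log ~p because it's been "
                        "seen ~b times in the past ~.3f secs "
                        "(last seen ~.3f secs ago)~n",
                        [Key, Count, Now - First, Now - Last]),
              maps:put(Key, {Count + 1, First, Now}, Seen);
          error ->
              io:format("ns_log: logging ~p~n", [Key]),
              maps:put(Key, {1, Now, Now}, Seen)
      end.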
You can try rebalance again.">>} INFO REPORT <0.65.0> 2011-01-03 13:12:02 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 13:12:02 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 13:12:02 =============================================================================== Pushing config done INFO REPORT <0.105.0> 2011-01-03 13:12:03 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:12:04 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:12:06 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:12:07 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.93.0> 2011-01-03 13:12:08 =============================================================================== ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,89127,400401}}, {active_buckets,["default"]}, {memory, [{total,15049264}, {processes,7449116}, {processes_used,7428924}, {system,7600148}, {atom,560301}, {atom_used,557531}, {binary,179896}, {code,4570913}, {ets,869868}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1139}, {memory_data,{4284698624,4147077120,{<0.228.0>,786208}}}, {disk_data, [{"C:\\",48162864,56},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,120258560}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1130405,0}}, {context_switches,{368057,0}}, {garbage_collection,{69369,576642742,0}}, {io,{{input,23848641},{output,14650959}}}, {reductions,{349184892,2038920}}, {run_queue,0}, {runtime,{16941,125}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,89128,398579}}, {active_buckets,["default"]}, {memory, [{total,15680720}, {processes,7885396}, {processes_used,7876812}, {system,7795324}, {atom,559325}, {atom_used,555631}, {binary,452128}, {code,4543239}, {ets,827964}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1070}, {memory_data,{4284698624,3784560640,{<11993.298.0>,1086308}}}, {disk_data, [{"C:\\",46243100,43},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,383340544}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1060890,0}}, {context_switches,{164719,0}}, {garbage_collection,{46859,343994450,0}}, {io,{{input,29147756},{output,11424076}}}, {reductions,{93961383,1350037}}, {run_queue,0}, {runtime,{9001,78}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,89128,133400}}, {active_buckets,[]}, {memory, [{total,11268576}, {processes,4716452}, {processes_used,4704012}, 
INFO REPORT <0.5353.0> 2011-01-03 13:12:09
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:09
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5353.0> 2011-01-03 13:12:10
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:11
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5353.0> 2011-01-03 13:12:11
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:12
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5353.0> 2011-01-03 13:12:12
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.5353.0> 2011-01-03 13:12:13
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

ERROR REPORT <0.5353.0> 2011-01-03 13:12:13
ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:13
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:12:15
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:12:17
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:12:18
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5390.0> 2011-01-03 13:12:19
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.5390.0> 2011-01-03 13:12:20
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']
INFO REPORT <0.105.0> 2011-01-03 13:12:21
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5390.0> 2011-01-03 13:12:21
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:22
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5390.0> 2011-01-03 13:12:22
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.5390.0> 2011-01-03 13:12:23
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

ERROR REPORT <0.5390.0> 2011-01-03 13:12:23
ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:24
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:12:25
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:12:26
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:12:29
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5410.0> 2011-01-03 13:12:29
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:30
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5410.0> 2011-01-03 13:12:30
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.5410.0> 2011-01-03 13:12:31
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:31
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.259.0> 2011-01-03 13:12:31
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 514 auth_errors 0 bucket_conns 3 bytes_read 4789946018 bytes_written 79206150 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 1725729 conn_yields 186 connection_structures 235 curr_connections 25 curr_items 1724458 curr_items_tot 2705587 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 2493 ep_commit_time 1 ep_commit_time_total 829 ep_data_age 357 ep_data_age_highwat 493 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 192 ep_flush_duration_highwat 192 ep_flush_duration_total 582 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 6919 ep_io_num_read 0 ep_io_num_write 2388113 ep_io_read_bytes 0 ep_io_write_bytes 3379491139 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 2233181069 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 737997 ep_num_eject_failures 1297672 ep_num_eject_replicas 581268 ep_num_expiry_pager_runs 0 ep_num_non_resident 1319144 ep_num_not_my_vbuckets 0 ep_num_pager_runs 5 ep_num_value_ejects 1319384 ep_oom_errors 0 ep_overhead 40832288 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 311533 ep_storage_age 356 ep_storage_age_highwat 490 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 0 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 4080318669 ep_total_del_items 0 ep_total_enqueued 2706654 ep_total_new_items 2387334 ep_total_persisted 2388112 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2274013357 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 513 tap_mutation_received 1574844 tap_mutation_sent 2055959 tap_opaque_received 513 tap_opaque_sent 1025 tap_vbucket_set_sent 1024 threads 4 time 1294089151 total_connections 818 uptime 1165 version 1.4.4_304_g7d5a132
INFO REPORT <0.5410.0> 2011-01-03 13:12:32
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:33
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5410.0> 2011-01-03 13:12:33
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

ERROR REPORT <0.5410.0> 2011-01-03 13:12:33
ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:34
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.110.0> 2011-01-03 13:12:36
ns_log: logging ns_orchestrator:4:Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101', 'ns_1@10.2.1.102'], EjectNodes = []

INFO REPORT <0.72.0> 2011-01-03 13:12:36
ns_log: suppressing duplicate log ns_orchestrator:4("Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101',\n 'ns_1@10.2.1.102'], EjectNodes = []\n") because it's been seen 2 times in the past 64.085 secs (last seen 40.747 secs ago
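In the stats dump at 13:12:31 above, the ep-engine memory watermarks are derived from the bucket quota: ep_mem_high_wat is 75% of ep_max_data_size and ep_mem_low_wat is 60% (the percentages are inferred from the arithmetic here; the log itself prints only the absolute values). Checked in an Erlang shell:

  Quota = 3426746368.          %% ep_max_data_size from the stats above
  trunc(Quota * 0.75).         %% 2570059776 = ep_mem_high_wat
  trunc(Quota * 0.60).         %% 2056047820 = ep_mem_low_wat

With mem_used at 2274013357 the bucket sits between the two watermarks, which is consistent with the item pager having run (ep_num_pager_runs 5, ep_num_value_ejects 1319384) in the same dump.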
INFO REPORT <0.65.0> 2011-01-03 13:12:36
config change: {node,'ns_1@10.2.1.102',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:12:36
config change: {node,'ns_1@10.2.1.101',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:12:36
config change: {node,'ns_1@10.2.1.100',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:12:36
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:12:36
Pushing config

INFO REPORT <0.105.0> 2011-01-03 13:12:36
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.85.0> 2011-01-03 13:12:36
Pushing config done

INFO REPORT <0.65.0> 2011-01-03 13:12:36
config change: rebalance_status -> running

INFO REPORT <0.65.0> 2011-01-03 13:12:36
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:12:36
Pushing config

INFO REPORT <0.85.0> 2011-01-03 13:12:36
Pushing config done

INFO REPORT <0.5433.0> 2011-01-03 13:12:37
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.110.0> 2011-01-03 13:12:38
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5433.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.0]], [['ns_1@10.2.1.101'| 0.0]], [['ns_1@10.2.1.102'| 0.0]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.5433.0> 2011-01-03 13:12:38
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:38
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5433.0> 2011-01-03 13:12:39
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:40
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5433.0> 2011-01-03 13:12:40
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']
INFO REPORT <0.5433.0> 2011-01-03 13:12:41
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:41
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5433.0> 2011-01-03 13:12:42
ns_1@10.2.1.100:ns_rebalancer:420: Waiting for ['ns_1@10.2.1.102']

INFO REPORT <0.110.0> 2011-01-03 13:12:42
ns_log: logging ns_orchestrator:2:Rebalance exited with reason wait_for_memcached_failed

INFO REPORT <0.72.0> 2011-01-03 13:12:42
ns_log: suppressing duplicate log ns_orchestrator:2("Rebalance exited with reason wait_for_memcached_failed\n") because it's been seen 2 times in the past 64.069 secs (last seen 40.747 secs ago

INFO REPORT <0.65.0> 2011-01-03 13:12:42
config change: rebalance_status -> {none,<<"Rebalance failed. See logs for detailed reason. You can try rebalance again.">>}

INFO REPORT <0.65.0> 2011-01-03 13:12:42
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:12:42
Pushing config

INFO REPORT <0.85.0> 2011-01-03 13:12:42
Pushing config done

INFO REPORT <0.105.0> 2011-01-03 13:12:44
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:12:45
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:12:46
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:12:48
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.85.0> 2011-01-03 13:12:48
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.5493.0> 2011-01-03 13:12:49
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:50
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5493.0> 2011-01-03 13:12:50
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:51
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5493.0> 2011-01-03 13:12:51
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']
INFO REPORT <0.5493.0> 2011-01-03 13:12:52
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:52
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.5493.0> 2011-01-03 13:12:53
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.102']

ERROR REPORT <0.5493.0> 2011-01-03 13:12:53
ns_1@10.2.1.100:ns_janitor:57: Bucket "default" not yet ready on ['ns_1@10.2.1.102']

INFO REPORT <0.105.0> 2011-01-03 13:12:54
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:12:56
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:12:58
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:13:00
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:13:01
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:13:04
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:13:05
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:13:06
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:13:07
memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs

INFO REPORT <0.85.0> 2011-01-03 13:13:08
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.93.0> 2011-01-03 13:13:08
ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,89187,413401}}, {active_buckets,["default"]}, {memory, [{total,14764496}, {processes,7036932}, {processes_used,7016740}, {system,7727564}, {atom,560301}, {atom_used,557531}, {binary,273600}, {code,4570913}, {ets,904044}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1199}, {memory_data,{4284698624,4165287936,{<0.299.0>,1757252}}}, {disk_data, [{"C:\\",48162864,56},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,117223424}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1190403,0}}, {context_switches,{383641,0}}, {garbage_collection,{72413,614947903,0}}, {io,{{input,24634527},{output,15541087}}}, {reductions,{360161142,1626364}}, {run_queue,0}, {runtime,{17409,78}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,89188,365400}}, {active_buckets,["default"]}, {memory, [{total,15250800}, {processes,7436660}, {processes_used,7428108}, {system,7814140}, {atom,559325}, {atom_used,555631}, {binary,437504}, {code,4543239}, {ets,861876}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1130}, {memory_data,{4284698624,3956031488,{<11993.387.0>,1271680}}}, {disk_data, [{"C:\\",46243100,43},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,259932160}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1120888,0}}, {context_switches,{175355,0}}, {garbage_collection,{49289,368916863,0}}, {io,{{input,29672053},{output,12056999}}}, {reductions,{101444016,1102892}}, {run_queue,0}, {runtime,{9531,47}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,89188,147400}}, {active_buckets,["default"]}, {memory, [{total,11709136}, {processes,4758884}, {processes_used,4746900}, {system,6950252}, {atom,541077}, {atom_used,528424}, {binary,397416}, {code,4280811}, {ets,320036}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,118}, {memory_data,{4284698624,497897472,{<10870.218.0>,1271780}}}, {disk_data, [{"C:\\",49423972,36},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,3629748224}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{110324,0}}, {context_switches,{19547,0}}, {garbage_collection,{3321,10101615,0}}, {io,{{input,5287291},{output,1847880}}}, {reductions,{3751629,161331}}, {run_queue,0}, {runtime,{780,47}}]}]}]
{replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,117223424}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1190403,0}}, {context_switches,{383641,0}}, {garbage_collection,{72413,614947903,0}}, {io,{{input,24634527},{output,15541087}}}, {reductions,{360161142,1626364}}, {run_queue,0}, {runtime,{17409,78}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,89188,365400}}, {active_buckets,["default"]}, {memory, [{total,15250800}, {processes,7436660}, {processes_used,7428108}, {system,7814140}, {atom,559325}, {atom_used,555631}, {binary,437504}, {code,4543239}, {ets,861876}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1130}, {memory_data,{4284698624,3956031488,{<11993.387.0>,1271680}}}, {disk_data, [{"C:\\",46243100,43},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,259932160}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1120888,0}}, {context_switches,{175355,0}}, {garbage_collection,{49289,368916863,0}}, {io,{{input,29672053},{output,12056999}}}, {reductions,{101444016,1102892}}, {run_queue,0}, {runtime,{9531,47}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,89188,147400}}, {active_buckets,["default"]}, {memory, [{total,11709136}, {processes,4758884}, {processes_used,4746900}, {system,6950252}, {atom,541077}, {atom_used,528424}, {binary,397416}, {code,4280811}, {ets,320036}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,118}, {memory_data,{4284698624,497897472,{<10870.218.0>,1271780}}}, {disk_data, [{"C:\\",49423972,36},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,3629748224}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{110324,0}}, {context_switches,{19547,0}}, {garbage_collection,{3321,10101615,0}}, {io,{{input,5287291},{output,1847880}}}, {reductions,{3751629,161331}}, {run_queue,0}, {runtime,{780,47}}]}]}] INFO REPORT <0.105.0> 2011-01-03 13:13:10 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:13:11 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:13:14 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_513 for 1.00 secs INFO REPORT <0.65.0> 2011-01-03 13:13:14 =============================================================================== config change: {node,'ns_1@10.2.1.102',membership} -> active INFO REPORT <0.65.0> 2011-01-03 13:13:14 =============================================================================== config change: {node,'ns_1@10.2.1.101',membership} -> active INFO REPORT <0.65.0> 2011-01-03 13:13:14 =============================================================================== config change: {node,'ns_1@10.2.1.100',membership} -> active INFO REPORT <0.65.0> 2011-01-03 13:13:14 
INFO REPORT <0.85.0> 2011-01-03 13:13:14
Pushing config

INFO REPORT <0.110.0> 2011-01-03 13:13:14
ns_log: logging ns_orchestrator:4:Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101', 'ns_1@10.2.1.102'], EjectNodes = []

INFO REPORT <0.72.0> 2011-01-03 13:13:14
ns_log: suppressing duplicate log ns_orchestrator:4("Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101',\n 'ns_1@10.2.1.102'], EjectNodes = []\n") because it's been seen 3 times in the past 102.071 secs (last seen 37.986 secs ago

INFO REPORT <0.85.0> 2011-01-03 13:13:14
Pushing config done

INFO REPORT <0.65.0> 2011-01-03 13:13:14
config change: rebalance_status -> running

INFO REPORT <0.65.0> 2011-01-03 13:13:14
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:13:14
Pushing config

INFO REPORT <0.85.0> 2011-01-03 13:13:14
Pushing config done

INFO REPORT <0.5600.0> 2011-01-03 13:13:16
vbucketmigrator<0.5600.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5600.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5600.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5600.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5600.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5600.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5600.0>: Starting to move bucket 512

INFO REPORT <0.110.0> 2011-01-03 13:13:18
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.0]], [['ns_1@10.2.1.101'| 0.0]], [['ns_1@10.2.1.102'| 0.0]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.3861.0> 2011-01-03 13:13:22
vbucketmigrator<0.3861.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.3861.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3861.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3861.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.3861.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3861.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3861.0>: Starting to move bucket 0

INFO REPORT <0.5600.0> 2011-01-03 13:13:22
vbucketmigrator<0.5600.0>: Bucket 512 moved to the next server
vbucketmigrator<0.5600.0>: Validate bucket states
vbucketmigrator<0.5600.0>: 512 ok
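The {dict,3,16,16,8,80,48,...} blob inside each "Skipping janitor" report is just the printed internal representation of a stock dict mapping each node to its rebalance progress fraction (0.0 here, rising in the later dumps). It can be reproduced with the stdlib dict module:

  %% Rebuilding the progress dict dumped above with the stdlib dict module.
  D = dict:from_list([{'ns_1@10.2.1.100', 0.0},
                      {'ns_1@10.2.1.101', 0.0},
                      {'ns_1@10.2.1.102', 0.0}]),
  %% Printing D shows the same {dict,3,16,16,8,80,48,...} tuple as the log;
  %% dict:to_list/1 recovers the readable pairs.
  dict:to_list(D).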
INFO REPORT <0.5623.0> 2011-01-03 13:13:23
vbucketmigrator<0.5623.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5623.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5623.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5623.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5623.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5623.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5623.0>: Starting to move bucket 513

INFO REPORT <0.110.0> 2011-01-03 13:13:28
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.005847953216374324]], [['ns_1@10.2.1.101'| 0.00588235294117645]], [['ns_1@10.2.1.102'| 0.0058651026392961825]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.5623.0> 2011-01-03 13:13:28
vbucketmigrator<0.5623.0>: Bucket 513 moved to the next server
vbucketmigrator<0.5623.0>: Validate bucket states
vbucketmigrator<0.5623.0>: 513 ok

INFO REPORT <0.5640.0> 2011-01-03 13:13:29
vbucketmigrator<0.5640.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5640.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5640.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5640.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5640.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5640.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5640.0>: Starting to move bucket 514

INFO REPORT <11993.3861.0> 2011-01-03 13:13:33
vbucketmigrator<0.3861.0>: Bucket 0 moved to the next server
vbucketmigrator<0.3861.0>: Validate bucket states
vbucketmigrator<0.3861.0>: 0 ok

INFO REPORT <11993.3900.0> 2011-01-03 13:13:34
vbucketmigrator<0.3900.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.3900.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3900.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3900.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.3900.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3900.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3900.0>: Starting to move bucket 1

INFO REPORT <0.5640.0> 2011-01-03 13:13:34
vbucketmigrator<0.5640.0>: Bucket 514 moved to the next server
vbucketmigrator<0.5640.0>: Validate bucket states
vbucketmigrator<0.5640.0>: 514 ok

INFO REPORT <0.5661.0> 2011-01-03 13:13:35
vbucketmigrator<0.5661.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5661.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5661.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5661.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5661.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5661.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5661.0>: Starting to move bucket 515
INFO REPORT <0.110.0> 2011-01-03 13:13:38
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.01754385964912286]], [['ns_1@10.2.1.101'| 0.0117647058823529]], [['ns_1@10.2.1.102'| 0.014662756598240456]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.5661.0> 2011-01-03 13:13:40
vbucketmigrator<0.5661.0>: Bucket 515 moved to the next server
vbucketmigrator<0.5661.0>: Validate bucket states
vbucketmigrator<0.5661.0>: 515 ok

INFO REPORT <0.5695.0> 2011-01-03 13:13:41
vbucketmigrator<0.5695.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5695.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5695.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5695.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5695.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5695.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5695.0>: Starting to move bucket 516

INFO REPORT <11993.3900.0> 2011-01-03 13:13:41
vbucketmigrator<0.3900.0>: Bucket 1 moved to the next server
vbucketmigrator<0.3900.0>: Validate bucket states
vbucketmigrator<0.3900.0>: 1 ok

INFO REPORT <11993.3920.0> 2011-01-03 13:13:42
vbucketmigrator<0.3920.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.3920.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3920.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3920.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.3920.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3920.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3920.0>: Starting to move bucket 2

INFO REPORT <0.85.0> 2011-01-03 13:13:45
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.5695.0> 2011-01-03 13:13:45
vbucketmigrator<0.5695.0>: Bucket 516 moved to the next server
vbucketmigrator<0.5695.0>: Validate bucket states
vbucketmigrator<0.5695.0>: 516 ok

INFO REPORT <0.5721.0> 2011-01-03 13:13:46
vbucketmigrator<0.5721.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5721.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5721.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5721.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5721.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5721.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5721.0>: Starting to move bucket 517
INFO REPORT <11993.3920.0> 2011-01-03 13:13:48
===============================================================================
vbucketmigrator<0.3920.0>: Bucket 2 moved to the next server
vbucketmigrator<0.3920.0>: Validate bucket states
vbucketmigrator<0.3920.0>: 2 ok

INFO REPORT <0.110.0> 2011-01-03 13:13:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.5582.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.03508771929824561]],
    [['ns_1@10.2.1.101'|0.01764705882352946]],
    [['ns_1@10.2.1.102'|0.02639296187683282]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <11993.3942.0> 2011-01-03 13:13:49
===============================================================================
vbucketmigrator<0.3942.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.3942.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3942.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3942.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.3942.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3942.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3942.0>: Starting to move bucket 3

INFO REPORT <0.5721.0> 2011-01-03 13:13:51
===============================================================================
vbucketmigrator<0.5721.0>: Bucket 517 moved to the next server
vbucketmigrator<0.5721.0>: Validate bucket states
vbucketmigrator<0.5721.0>: 517 ok

INFO REPORT <0.5741.0> 2011-01-03 13:13:52
===============================================================================
vbucketmigrator<0.5741.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5741.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5741.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5741.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5741.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5741.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5741.0>: Starting to move bucket 518

INFO REPORT <11993.3942.0> 2011-01-03 13:13:55
===============================================================================
vbucketmigrator<0.3942.0>: Bucket 3 moved to the next server
vbucketmigrator<0.3942.0>: Validate bucket states
vbucketmigrator<0.3942.0>: 3 ok

INFO REPORT <11993.3957.0> 2011-01-03 13:13:56
===============================================================================
vbucketmigrator<0.3957.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.3957.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3957.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3957.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.3957.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3957.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3957.0>: Starting to move bucket 4

INFO REPORT <0.5741.0> 2011-01-03 13:13:56
===============================================================================
vbucketmigrator<0.5741.0>: Bucket 518 moved to the next server
vbucketmigrator<0.5741.0>: Validate bucket states
vbucketmigrator<0.5741.0>: 518 ok

INFO REPORT <0.85.0> 2011-01-03 13:13:57
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.5759.0> 2011-01-03 13:13:57
===============================================================================
vbucketmigrator<0.5759.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5759.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5759.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5759.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5759.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5759.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5759.0>: Starting to move bucket 519

INFO REPORT <0.110.0> 2011-01-03 13:13:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.5582.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.040935672514619936]],
    [['ns_1@10.2.1.101'|0.02941176470588236]],
    [['ns_1@10.2.1.102'|0.035190615835777095]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.5759.0> 2011-01-03 13:14:01
===============================================================================
vbucketmigrator<0.5759.0>: Bucket 519 moved to the next server
vbucketmigrator<0.5759.0>: Validate bucket states
vbucketmigrator<0.5759.0>: 519 ok

INFO REPORT <0.5773.0> 2011-01-03 13:14:02
===============================================================================
vbucketmigrator<0.5773.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5773.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5773.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5773.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5773.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5773.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5773.0>: Starting to move bucket 520

INFO REPORT <11993.3957.0> 2011-01-03 13:14:03
===============================================================================
vbucketmigrator<0.3957.0>: Bucket 4 moved to the next server
vbucketmigrator<0.3957.0>: Validate bucket states
vbucketmigrator<0.3957.0>: 4 ok

INFO REPORT <11993.3977.0> 2011-01-03 13:14:04
===============================================================================
vbucketmigrator<0.3977.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.3977.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3977.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3977.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.3977.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3977.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3977.0>: Starting to move bucket 5

INFO REPORT <0.5773.0> 2011-01-03 13:14:07
===============================================================================
vbucketmigrator<0.5773.0>: Bucket 520 moved to the next server
vbucketmigrator<0.5773.0>: Validate bucket states
vbucketmigrator<0.5773.0>: 520 ok
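Each vbucket move is bracketed by a "Starting to move bucket N" and a "Bucket N moved to the next server" record, so per-move latency can be recovered from the INFO REPORT header timestamps. A sketch under that assumption (the log file name is hypothetical; the two migrator streams here use disjoint vbucket ids, so pairing by id alone is safe):

import re
from datetime import datetime

HEADER = re.compile(r"^INFO REPORT <[^>]+> (\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})")
START  = re.compile(r"Starting to move bucket (\d+)")
DONE   = re.compile(r"Bucket (\d+) moved to the next server")

def move_durations(lines):
    # Remember the timestamp of the enclosing report header, then pair
    # start/done messages by vbucket id.
    ts, started, durations = None, {}, {}
    for line in lines:
        m = HEADER.match(line)
        if m:
            ts = datetime.strptime(m.group(1), "%Y-%m-%d %H:%M:%S")
            continue
        m = START.search(line)
        if m:
            started[m.group(1)] = ts
            continue
        m = DONE.search(line)
        if m and m.group(1) in started:
            durations[m.group(1)] = (ts - started.pop(m.group(1))).total_seconds()
    return durations

with open("ns_server.debug.log") as f:  # hypothetical file name
    for vb, secs in sorted(move_durations(f).items(), key=lambda kv: int(kv[0])):
        print(f"vbucket {vb}: {secs:.0f}s")

Against the records above this comes out at roughly 4-6 seconds per vbucket move.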
INFO REPORT <0.93.0> 2011-01-03 13:14:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,89247,411401}},
   {active_buckets,["default"]},
   {memory,[{total,18118776},{processes,10428148},{processes_used,10411556},
            {system,7690628},{atom,560301},{atom_used,557531},{binary,236624},
            {code,4570913},{ets,902428}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},{kernel,"2.13.4"},
             {sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},
             {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1259},
   {memory_data,{4284698624,4167852032,{<0.299.0>,1757252}}},
   {disk_data,[{"C:\\",48162864,57},{"D:\\",51279476,0},{"G:\\",34724465,17}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,98918400},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{1250401,0}},{context_switches,{404221,0}},
                {garbage_collection,{76081,659820365,0}},
                {io,{{input,26104454},{output,16359884}}},
                {reductions,{378830177,2141346}},{run_queue,0},
                {runtime,{18361,94}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,89248,378402}},
   {active_buckets,["default"]},
   {memory,[{total,14688384},{processes,6922948},{processes_used,6914748},
            {system,7765436},{atom,559813},{atom_used,556363},{binary,343240},
            {code,4551541},{ets,898564}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},{kernel,"2.13.4"},
             {sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},
             {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1190},
   {memory_data,{4284698624,4071444480,{<11993.387.0>,1271680}}},
   {disk_data,[{"C:\\",46243100,43},{"D:\\",51809624,0},{"G:\\",33929248,18}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,111742976},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{1180901,0}},{context_switches,{187191,0}},
                {garbage_collection,{52261,395741238,0}},
                {io,{{input,31024933},{output,13266452}}},
                {reductions,{109451191,1105732}},{run_queue,0},
                {runtime,{10140,47}}]}]},
 {'ns_1@10.2.1.102',
  [{last_heard,{1294,89248,144400}},
   {active_buckets,["default"]},
   {memory,[{total,11940264},{processes,4993540},{processes_used,4981076},
            {system,6946724},{atom,541077},{atom_used,528589},{binary,310304},
            {code,4280811},{ets,402692}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{kernel,"2.13.4"},
             {sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},
             {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,178},
   {memory_data,{4284698624,656019456,{<10870.218.0>,1271780}}},
   {disk_data,[{"C:\\",49423972,36},{"D:\\",52797620,0},{"G:\\",34724465,17}]},
   {replication,[{"default",1.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,3535859712},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{170322,0}},{context_switches,{24908,0}},
                {garbage_collection,{4714,16260601,0}},
                {io,{{input,5702225},{output,2325742}}},
                {reductions,{6193052,574699}},{run_queue,0},
                {runtime,{1404,16}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:14:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.5582.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.052631578947368474]],
    [['ns_1@10.2.1.101'|0.03529411764705881]],
    [['ns_1@10.2.1.102'|0.04398826979472137]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.5785.0> 2011-01-03 13:14:08
===============================================================================
vbucketmigrator<0.5785.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5785.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5785.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5785.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5785.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5785.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5785.0>: Starting to move bucket 521

INFO REPORT <0.259.0> 2011-01-03 13:14:11
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 524
auth_errors 0
bucket_conns 32
bytes_read 4933039496
bytes_written 82822972
cas_badval 0
cas_hits 0
cas_misses 0
cmd_flush 0
cmd_get 0
cmd_set 1824514
conn_yields 186
connection_structures 235
curr_connections 54
curr_items 1789187
curr_items_tot 2801220
daemon_connections 10
decr_hits 0
decr_misses 0
delete_hits 0
delete_misses 0
ep_bg_fetched 0
ep_commit_num 2702
ep_commit_time 0
ep_commit_time_total 904
ep_data_age 192
ep_data_age_highwat 495
ep_db_cleaner_status complete
ep_db_strategy multiMTDB
ep_dbinit 81
ep_dbname c:/Program Files/Membase/Server/data/ns_1/default
ep_dbshards 4
ep_expired 0
ep_flush_duration 297
ep_flush_duration_highwat 297
ep_flush_duration_total 879
ep_flush_preempts 0
ep_flusher_state running
ep_flusher_todo 112102
ep_io_num_read 31777
ep_io_num_write 2594464
ep_io_read_bytes 44977082
ep_io_write_bytes 3671698096
ep_item_begin_failed 0
ep_item_commit_failed 0
ep_item_flush_expired 0
ep_item_flush_failed 0
ep_kv_size 2134321724
ep_max_data_size 3426746368
ep_max_txn_size 1000
ep_mem_high_wat 2570059776
ep_mem_low_wat 2056047820
ep_min_data_age 0
ep_num_active_non_resident 724517
ep_num_eject_failures 1297672
ep_num_eject_replicas 755053
ep_num_expiry_pager_runs 0
ep_num_non_resident 1492948
ep_num_not_my_vbuckets 3005
ep_num_pager_runs 5
ep_num_value_ejects 1493253
ep_oom_errors 0
ep_overhead 35557091
ep_pending_ops 0
ep_pending_ops_max 0
ep_pending_ops_max_duration 0
ep_pending_ops_total 0
ep_queue_age_cap 900
ep_queue_size 95776
ep_storage_age 191
ep_storage_age_highwat 492
ep_storage_type featured
ep_store_max_concurrency 10
ep_store_max_readers 9
ep_store_max_readwrite 1
ep_tap_bg_fetch_requeued 0
ep_tap_bg_fetched 31777
ep_tap_keepalive 0
ep_tmp_oom_errors 0
ep_too_old 0
ep_too_young 0
ep_total_cache_size 4224875924
ep_total_del_items 0
ep_total_enqueued 2802433
ep_total_new_items 2593558
ep_total_persisted 2594463
ep_vbucket_del 512
ep_vbucket_del_avg_walltime 118096
ep_vbucket_del_fail 0
ep_vbucket_del_max_walltime 140400
ep_vbucket_del_total_walltime 60465600
ep_version 1.6.2
ep_warmed_up 0
ep_warmup true
ep_warmup_dups 0
ep_warmup_oom 0
ep_warmup_thread complete
ep_warmup_time 31200
get_hits 0
get_misses 0
incr_hits 0
incr_misses 0
libevent 2.0.7-rc
limit_maxbytes 67108864
mem_used 2169878815
pid 2160
pointer_size 64
rejected_conns 0
tap_connect_received 523
tap_mutation_received 1574844
tap_mutation_sent 2220165
tap_opaque_received 513
tap_opaque_sent 1035
tap_vbucket_set_sent 1043
threads 4
time 1294089250
total_connections 858
uptime 1264
version 1.4.4_304_g7d5a132

INFO REPORT <11993.3977.0> 2011-01-03 13:14:12
===============================================================================
vbucketmigrator<0.3977.0>: Bucket 5 moved to the next server
vbucketmigrator<0.3977.0>: Validate bucket states
vbucketmigrator<0.3977.0>: 5 ok
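A few of the ep_engine counters in the 13:14:11 stats dump are easier to judge as ratios. A sketch reducing them, using the logged values; the resident-ratio and watermark arithmetic is the standard interpretation of these counters, not something the server prints itself:

# Values copied from the "Stats for bucket" dump at 13:14:11.
stats = {
    "curr_items_tot":      2801220,
    "ep_num_non_resident": 1492948,
    "mem_used":            2169878815,
    "ep_mem_low_wat":      2056047820,
    "ep_mem_high_wat":     2570059776,
    "ep_max_data_size":    3426746368,
}

resident = 1 - stats["ep_num_non_resident"] / stats["curr_items_tot"]
print(f"resident ratio: {resident:.1%}")                                      # ~46.7%
print(f"mem_used/quota: {stats['mem_used'] / stats['ep_max_data_size']:.1%}")  # ~63.3%
print("above low watermark:", stats["mem_used"] > stats["ep_mem_low_wat"])     # True

Being above the low watermark is consistent with the ~1.49M value ejections (ep_num_value_ejects) already recorded in the same dump.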
INFO REPORT <0.5785.0> 2011-01-03 13:14:13
===============================================================================
vbucketmigrator<0.5785.0>: Bucket 521 moved to the next server
vbucketmigrator<0.5785.0>: Validate bucket states
vbucketmigrator<0.5785.0>: 521 ok

INFO REPORT <0.5818.0> 2011-01-03 13:14:14
===============================================================================
vbucketmigrator<0.5818.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5818.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5818.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5818.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5818.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5818.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5818.0>: Starting to move bucket 522

INFO REPORT <11993.3996.0> 2011-01-03 13:14:14
===============================================================================
vbucketmigrator<0.3996.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.3996.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3996.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.3996.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.3996.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3996.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.3996.0>: Starting to move bucket 6

INFO REPORT <0.110.0> 2011-01-03 13:14:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.5582.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.05847953216374269]],
    [['ns_1@10.2.1.101'|0.04117647058823526]],
    [['ns_1@10.2.1.102'|0.04985337243401755]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.5818.0> 2011-01-03 13:14:19
===============================================================================
vbucketmigrator<0.5818.0>: Bucket 522 moved to the next server
vbucketmigrator<0.5818.0>: Validate bucket states
vbucketmigrator<0.5818.0>: 522 ok

INFO REPORT <0.5836.0> 2011-01-03 13:14:20
===============================================================================
vbucketmigrator<0.5836.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5836.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5836.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5836.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5836.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5836.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5836.0>: Starting to move bucket 523

INFO REPORT <11993.3996.0> 2011-01-03 13:14:24
===============================================================================
vbucketmigrator<0.3996.0>: Bucket 6 moved to the next server
vbucketmigrator<0.3996.0>: Validate bucket states
vbucketmigrator<0.3996.0>: 6 ok

INFO REPORT <0.5836.0> 2011-01-03 13:14:24
===============================================================================
vbucketmigrator<0.5836.0>: Bucket 523 moved to the next server
vbucketmigrator<0.5836.0>: Validate bucket states
vbucketmigrator<0.5836.0>: 523 ok

INFO REPORT <0.5845.0> 2011-01-03 13:14:25
===============================================================================
vbucketmigrator<0.5845.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5845.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5845.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5845.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5845.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5845.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5845.0>: Starting to move bucket 524

INFO REPORT <11993.4022.0> 2011-01-03 13:14:26
===============================================================================
vbucketmigrator<0.4022.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4022.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4022.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4022.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4022.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4022.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4022.0>: Starting to move bucket 7

INFO REPORT <0.110.0> 2011-01-03 13:14:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.5582.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.07017543859649122]],
    [['ns_1@10.2.1.101'|0.04705882352941182]],
    [['ns_1@10.2.1.102'|0.058651026392961825]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.5845.0> 2011-01-03 13:14:30
===============================================================================
vbucketmigrator<0.5845.0>: Bucket 524 moved to the next server
vbucketmigrator<0.5845.0>: Validate bucket states
vbucketmigrator<0.5845.0>: 524 ok

INFO REPORT <0.5860.0> 2011-01-03 13:14:31
===============================================================================
vbucketmigrator<0.5860.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5860.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5860.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5860.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5860.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5860.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5860.0>: Starting to move bucket 525

INFO REPORT <11993.4022.0> 2011-01-03 13:14:34
===============================================================================
vbucketmigrator<0.4022.0>: Bucket 7 moved to the next server
vbucketmigrator<0.4022.0>: Validate bucket states
vbucketmigrator<0.4022.0>: 7 ok

INFO REPORT <0.5860.0> 2011-01-03 13:14:35
===============================================================================
vbucketmigrator<0.5860.0>: Bucket 525 moved to the next server
vbucketmigrator<0.5860.0>: Validate bucket states
vbucketmigrator<0.5860.0>: 525 ok

INFO REPORT <11993.4046.0> 2011-01-03 13:14:36
===============================================================================
vbucketmigrator<0.4046.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4046.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4046.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4046.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4046.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4046.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4046.0>: Starting to move bucket 8
INFO REPORT <0.5875.0> 2011-01-03 13:14:36
===============================================================================
vbucketmigrator<0.5875.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5875.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5875.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5875.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5875.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5875.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5875.0>: Starting to move bucket 526

INFO REPORT <0.110.0> 2011-01-03 13:14:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.5582.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.08187134502923976]],
    [['ns_1@10.2.1.101'|0.05294117647058827]],
    [['ns_1@10.2.1.102'|0.06744868035190621]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.5875.0> 2011-01-03 13:14:42
===============================================================================
vbucketmigrator<0.5875.0>: Bucket 526 moved to the next server
vbucketmigrator<0.5875.0>: Validate bucket states
vbucketmigrator<0.5875.0>: 526 ok

INFO REPORT <0.5892.0> 2011-01-03 13:14:43
===============================================================================
vbucketmigrator<0.5892.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5892.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5892.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5892.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5892.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5892.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5892.0>: Starting to move bucket 527

INFO REPORT <11993.4046.0> 2011-01-03 13:14:44
===============================================================================
vbucketmigrator<0.4046.0>: Bucket 8 moved to the next server
vbucketmigrator<0.4046.0>: Validate bucket states
vbucketmigrator<0.4046.0>: 8 ok

INFO REPORT <11993.4068.0> 2011-01-03 13:14:46
===============================================================================
vbucketmigrator<0.4068.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4068.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4068.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4068.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4068.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4068.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4068.0>: Starting to move bucket 9

INFO REPORT <0.5892.0> 2011-01-03 13:14:47
===============================================================================
vbucketmigrator<0.5892.0>: Bucket 527 moved to the next server
vbucketmigrator<0.5892.0>: Validate bucket states
vbucketmigrator<0.5892.0>: 527 ok

INFO REPORT <0.110.0> 2011-01-03 13:14:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.5582.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.0935672514619883]],
    [['ns_1@10.2.1.101'|0.05882352941176472]],
    [['ns_1@10.2.1.102'|0.07624633431085048]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.5912.0> 2011-01-03 13:14:48
===============================================================================
vbucketmigrator<0.5912.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5912.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5912.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5912.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5912.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5912.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5912.0>: Starting to move bucket 528

INFO REPORT <0.85.0> 2011-01-03 13:14:53
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.5912.0> 2011-01-03 13:14:53
===============================================================================
vbucketmigrator<0.5912.0>: Bucket 528 moved to the next server
vbucketmigrator<0.5912.0>: Validate bucket states
vbucketmigrator<0.5912.0>: 528 ok

INFO REPORT <0.5936.0> 2011-01-03 13:14:54
===============================================================================
vbucketmigrator<0.5936.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5936.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5936.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5936.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5936.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5936.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5936.0>: Starting to move bucket 529

INFO REPORT <11993.4068.0> 2011-01-03 13:14:56
===============================================================================
vbucketmigrator<0.4068.0>: Bucket 9 moved to the next server
vbucketmigrator<0.4068.0>: Validate bucket states
vbucketmigrator<0.4068.0>: 9 ok

INFO REPORT <11993.4095.0> 2011-01-03 13:14:58
===============================================================================
vbucketmigrator<0.4095.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4095.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4095.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4095.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4095.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4095.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4095.0>: Starting to move bucket 10

INFO REPORT <0.110.0> 2011-01-03 13:14:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.5582.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.10526315789473684]],
    [['ns_1@10.2.1.101'|0.05882352941176472]],
    [['ns_1@10.2.1.102'|0.08211143695014667]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.5936.0> 2011-01-03 13:14:59
===============================================================================
vbucketmigrator<0.5936.0>: Bucket 529 moved to the next server
vbucketmigrator<0.5936.0>: Validate bucket states
vbucketmigrator<0.5936.0>: 529 ok
INFO REPORT <0.5959.0> 2011-01-03 13:15:00
===============================================================================
vbucketmigrator<0.5959.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5959.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5959.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5959.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5959.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5959.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5959.0>: Starting to move bucket 530

INFO REPORT <11993.4095.0> 2011-01-03 13:15:02
===============================================================================
vbucketmigrator<0.4095.0>: Bucket 10 moved to the next server
vbucketmigrator<0.4095.0>: Validate bucket states
vbucketmigrator<0.4095.0>: 10 ok

INFO REPORT <11993.4109.0> 2011-01-03 13:15:03
===============================================================================
vbucketmigrator<0.4109.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4109.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4109.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4109.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4109.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4109.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4109.0>: Starting to move bucket 11

INFO REPORT <0.5959.0> 2011-01-03 13:15:04
===============================================================================
vbucketmigrator<0.5959.0>: Bucket 530 moved to the next server
vbucketmigrator<0.5959.0>: Validate bucket states
vbucketmigrator<0.5959.0>: 530 ok

INFO REPORT <0.5976.0> 2011-01-03 13:15:05
===============================================================================
vbucketmigrator<0.5976.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5976.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5976.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5976.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5976.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5976.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5976.0>: Starting to move bucket 531

INFO REPORT <11993.4109.0> 2011-01-03 13:15:05
===============================================================================
vbucketmigrator<0.4109.0>: Bucket 11 moved to the next server
vbucketmigrator<0.4109.0>: Validate bucket states
vbucketmigrator<0.4109.0>: 11 ok

INFO REPORT <11993.4117.0> 2011-01-03 13:15:06
===============================================================================
vbucketmigrator<0.4117.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4117.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4117.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4117.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4117.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4117.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4117.0>: Starting to move bucket 12

INFO REPORT <0.93.0> 2011-01-03 13:15:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,89307,409401}},
   {active_buckets,["default"]},
   {memory,[{total,19238264},{processes,11506676},{processes_used,11490084},
            {system,7731588},{atom,560301},{atom_used,557531},{binary,243176},
            {code,4570913},{ets,937844}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},{kernel,"2.13.4"},
             {sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},
             {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1319},
   {memory_data,{4284698624,4187779072,{<0.299.0>,1457152}}},
   {disk_data,[{"C:\\",48162864,57},{"D:\\",51279476,0},{"G:\\",34724465,17}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,88567808},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{1310399,0}},{context_switches,{420698,0}},
                {garbage_collection,{79273,699091546,0}},
                {io,{{input,28148896},{output,17656389}}},
                {reductions,{389370376,2429342}},{run_queue,0},
                {runtime,{18813,93}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,89308,376402}},
   {active_buckets,["default"]},
   {memory,[{total,14337920},{processes,6553132},{processes_used,6544932},
            {system,7784788},{atom,559813},{atom_used,556363},{binary,328560},
            {code,4551541},{ets,932444}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},{kernel,"2.13.4"},
             {sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},
             {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1250},
   {memory_data,{4284698624,4192301056,{<11993.298.0>,1271780}}},
   {disk_data,[{"C:\\",46243100,44},{"D:\\",51809624,0},{"G:\\",33929248,18}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,101019648},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{1240899,0}},{context_switches,{198549,0}},
                {garbage_collection,{54670,421710842,0}},
                {io,{{input,31680705},{output,13675896}}},
                {reductions,{117240800,1233449}},{run_queue,0},
                {runtime,{10530,47}}]}]},
 {'ns_1@10.2.1.102',
  [{last_heard,{1294,89308,142400}},
   {active_buckets,["default"]},
   {memory,[{total,12633568},{processes,5637036},{processes_used,5623612},
            {system,6996532},{atom,541077},{atom_used,528589},{binary,324328},
            {code,4280811},{ets,438676}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{kernel,"2.13.4"},
             {sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},
             {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,238},
   {memory_data,{4284698624,768131072,{<10870.218.0>,1271780}}},
   {disk_data,[{"C:\\",49423972,36},{"D:\\",52797620,0},{"G:\\",34724465,17}]},
   {replication,[{"default",1.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,3424342016},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{230320,0}},{context_switches,{30809,0}},
                {garbage_collection,{6433,23897879,0}},
                {io,{{input,6349538},{output,2955212}}},
                {reductions,{9725093,592934}},{run_queue,0},
                {runtime,{1825,172}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:15:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.5582.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.11695906432748537]],
    [['ns_1@10.2.1.101'|0.07058823529411762]],
    [['ns_1@10.2.1.102'|0.09384164222873903]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.5976.0> 2011-01-03 13:15:09
===============================================================================
vbucketmigrator<0.5976.0>: Bucket 531 moved to the next server
vbucketmigrator<0.5976.0>: Validate bucket states
vbucketmigrator<0.5976.0>: 531 ok
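The ns_doctor "Current node statuses" dumps carry OS-level memory figures per node. A sketch that scans one dump for nodes running low on physical memory (the 5% threshold is an arbitrary illustration, not a Membase setting):

import re

NODE  = re.compile(r"\{'(ns_1@[\d.]+)',")
FREE  = re.compile(r"\{free_memory,(\d+)\}")
TOTAL = re.compile(r"\{total_memory,(\d+)\}")  # does not match system_total_memory

def low_memory_nodes(dump, threshold=0.05):
    nodes  = NODE.findall(dump)
    frees  = [int(x) for x in FREE.findall(dump)]
    totals = [int(x) for x in TOTAL.findall(dump)]
    return [(n, f / t) for n, f, t in zip(nodes, frees, totals) if f / t < threshold]

On the 13:15:08 dump this flags ns_1@10.2.1.100 (~2.1% free) and ns_1@10.2.1.101 (~2.4% free), while ns_1@10.2.1.102, whose wall_clock of 238 seconds suggests a recent restart, still has roughly 80% of its 4 GB free.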
INFO REPORT <0.5994.0> 2011-01-03 13:15:10
===============================================================================
vbucketmigrator<0.5994.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5994.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5994.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5994.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.5994.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5994.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.5994.0>: Starting to move bucket 532

INFO REPORT <0.5994.0> 2011-01-03 13:15:10
===============================================================================
vbucketmigrator<0.5994.0>: Bucket 532 moved to the next server
vbucketmigrator<0.5994.0>: Validate bucket states
vbucketmigrator<0.5994.0>: 532 ok

INFO REPORT <11993.4117.0> 2011-01-03 13:15:11
===============================================================================
vbucketmigrator<0.4117.0>: Bucket 12 moved to the next server
vbucketmigrator<0.4117.0>: Validate bucket states
vbucketmigrator<0.4117.0>: 12 ok

INFO REPORT <0.6000.0> 2011-01-03 13:15:11
===============================================================================
vbucketmigrator<0.6000.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6000.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6000.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6000.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6000.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6000.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6000.0>: Starting to move bucket 533

INFO REPORT <0.85.0> 2011-01-03 13:15:13
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.6000.0> 2011-01-03 13:15:13
===============================================================================
vbucketmigrator<0.6000.0>: Bucket 533 moved to the next server
vbucketmigrator<0.6000.0>: Validate bucket states
vbucketmigrator<0.6000.0>: 533 ok

INFO REPORT <11993.4133.0> 2011-01-03 13:15:14
===============================================================================
vbucketmigrator<0.4133.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4133.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4133.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4133.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4133.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4133.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4133.0>: Starting to move bucket 13

INFO REPORT <0.6008.0> 2011-01-03 13:15:14
===============================================================================
vbucketmigrator<0.6008.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6008.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6008.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6008.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6008.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6008.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6008.0>: Starting to move bucket 534

INFO REPORT <0.6008.0> 2011-01-03 13:15:17
===============================================================================
vbucketmigrator<0.6008.0>: Bucket 534 moved to the next server
vbucketmigrator<0.6008.0>: Validate bucket states
vbucketmigrator<0.6008.0>: 534 ok

INFO REPORT <11993.4133.0> 2011-01-03 13:15:17
===============================================================================
vbucketmigrator<0.4133.0>: Bucket 13 moved to the next server
vbucketmigrator<0.4133.0>: Validate bucket states
vbucketmigrator<0.4133.0>: 13 ok

INFO REPORT <0.6018.0> 2011-01-03 13:15:18
===============================================================================
vbucketmigrator<0.6018.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6018.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6018.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6018.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6018.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6018.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6018.0>: Starting to move bucket 535

INFO REPORT <0.110.0> 2011-01-03 13:15:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.5582.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.14035087719298245]],
    [['ns_1@10.2.1.101'|0.08235294117647063]],
    [['ns_1@10.2.1.102'|0.11143695014662758]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <11993.4148.0> 2011-01-03 13:15:18
===============================================================================
vbucketmigrator<0.4148.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4148.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4148.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4148.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4148.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4148.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4148.0>: Starting to move bucket 14

INFO REPORT <0.6018.0> 2011-01-03 13:15:22
===============================================================================
vbucketmigrator<0.6018.0>: Bucket 535 moved to the next server
vbucketmigrator<0.6018.0>: Validate bucket states
vbucketmigrator<0.6018.0>: 535 ok

INFO REPORT <0.6033.0> 2011-01-03 13:15:23
===============================================================================
vbucketmigrator<0.6033.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6033.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6033.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6033.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6033.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6033.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6033.0>: Starting to move bucket 536

INFO REPORT <11993.4148.0> 2011-01-03 13:15:25
===============================================================================
vbucketmigrator<0.4148.0>: Bucket 14 moved to the next server
vbucketmigrator<0.4148.0>: Validate bucket states
vbucketmigrator<0.4148.0>: 14 ok
INFO REPORT <11993.4169.0> 2011-01-03 13:15:27
===============================================================================
vbucketmigrator<0.4169.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4169.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4169.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4169.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4169.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4169.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4169.0>: Starting to move bucket 15

INFO REPORT <0.6033.0> 2011-01-03 13:15:27
===============================================================================
vbucketmigrator<0.6033.0>: Bucket 536 moved to the next server
vbucketmigrator<0.6033.0>: Validate bucket states
vbucketmigrator<0.6033.0>: 536 ok

INFO REPORT <0.110.0> 2011-01-03 13:15:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.5582.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.14619883040935677]],
    [['ns_1@10.2.1.101'|0.09411764705882353]],
    [['ns_1@10.2.1.102'|0.12023460410557185]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.6042.0> 2011-01-03 13:15:28
===============================================================================
vbucketmigrator<0.6042.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6042.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6042.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6042.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6042.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6042.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6042.0>: Starting to move bucket 537

INFO REPORT <0.6042.0> 2011-01-03 13:15:33
===============================================================================
vbucketmigrator<0.6042.0>: Bucket 537 moved to the next server
vbucketmigrator<0.6042.0>: Validate bucket states
vbucketmigrator<0.6042.0>: 537 ok

INFO REPORT <0.6056.0> 2011-01-03 13:15:34
===============================================================================
vbucketmigrator<0.6056.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6056.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6056.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6056.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6056.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6056.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6056.0>: Starting to move bucket 538

INFO REPORT <11993.4169.0> 2011-01-03 13:15:36
===============================================================================
vbucketmigrator<0.4169.0>: Bucket 15 moved to the next server
vbucketmigrator<0.4169.0>: Validate bucket states
vbucketmigrator<0.4169.0>: 15 ok

INFO REPORT <0.6056.0> 2011-01-03 13:15:37
===============================================================================
vbucketmigrator<0.6056.0>: Bucket 538 moved to the next server
vbucketmigrator<0.6056.0>: Validate bucket states
vbucketmigrator<0.6056.0>: 538 ok

INFO REPORT <0.6071.0> 2011-01-03 13:15:38
===============================================================================
vbucketmigrator<0.6071.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6071.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6071.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6071.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6071.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6071.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6071.0>: Starting to move bucket 539

INFO REPORT <0.110.0> 2011-01-03 13:15:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.5582.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.1578947368421053]],
    [['ns_1@10.2.1.101'|0.09999999999999998]],
    [['ns_1@10.2.1.102'|0.12903225806451613]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <11993.4190.0> 2011-01-03 13:15:38
===============================================================================
vbucketmigrator<0.4190.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4190.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4190.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4190.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4190.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4190.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4190.0>: Starting to move bucket 16

INFO REPORT <0.6071.0> 2011-01-03 13:15:41
===============================================================================
vbucketmigrator<0.6071.0>: Bucket 539 moved to the next server
vbucketmigrator<0.6071.0>: Validate bucket states
vbucketmigrator<0.6071.0>: 539 ok

INFO REPORT <0.6081.0> 2011-01-03 13:15:42
===============================================================================
vbucketmigrator<0.6081.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6081.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6081.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6081.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6081.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6081.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6081.0>: Starting to move bucket 540

INFO REPORT <0.6081.0> 2011-01-03 13:15:45
===============================================================================
vbucketmigrator<0.6081.0>: Bucket 540 moved to the next server
vbucketmigrator<0.6081.0>: Validate bucket states
vbucketmigrator<0.6081.0>: 540 ok

INFO REPORT <0.6089.0> 2011-01-03 13:15:46
===============================================================================
vbucketmigrator<0.6089.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6089.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6089.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6089.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6089.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6089.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6089.0>: Starting to move bucket 541

INFO REPORT <11993.4190.0> 2011-01-03 13:15:46
===============================================================================
vbucketmigrator<0.4190.0>: Bucket 16 moved to the next server
vbucketmigrator<0.4190.0>: Validate bucket states
vbucketmigrator<0.4190.0>: 16 ok
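Two orchestrator samples are enough for a rough time-to-completion estimate. A sketch using ns_1@10.2.1.100's fractions from the 13:13:28 and 13:15:38 dumps above; rebalance progress is not strictly linear, so this extrapolation is illustrative only:

from datetime import datetime

t0, p0 = datetime(2011, 1, 3, 13, 13, 28), 0.005847953216374324
t1, p1 = datetime(2011, 1, 3, 13, 15, 38), 0.1578947368421053

rate = (p1 - p0) / (t1 - t0).total_seconds()  # fraction per second
eta  = (1.0 - p1) / rate                      # seconds remaining
print(f"~{rate * 60:.1%}/min, ETA ~{eta / 60:.0f} min")

That works out to roughly 7% per minute, so about 12 more minutes for this node if the pace holds.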
INFO REPORT <11993.4217.0> 2011-01-03 13:15:47
===============================================================================
vbucketmigrator<0.4217.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4217.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4217.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4217.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4217.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4217.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4217.0>: Starting to move bucket 17

INFO REPORT <0.110.0> 2011-01-03 13:15:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.5582.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|0.17543859649122806]],
    [['ns_1@10.2.1.101'|0.09999999999999998]],
    [['ns_1@10.2.1.102'|0.1378299120234604]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.6089.0> 2011-01-03 13:15:49
===============================================================================
vbucketmigrator<0.6089.0>: Bucket 541 moved to the next server
vbucketmigrator<0.6089.0>: Validate bucket states
vbucketmigrator<0.6089.0>: 541 ok

INFO REPORT <0.6106.0> 2011-01-03 13:15:50
===============================================================================
vbucketmigrator<0.6106.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6106.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6106.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6106.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6106.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6106.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6106.0>: Starting to move bucket 542

INFO REPORT <0.85.0> 2011-01-03 13:15:50
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.259.0> 2011-01-03 13:15:51
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 545
auth_errors 0
bucket_conns 32
bytes_read 5326498759
bytes_written 91231608
cas_badval 0
cas_hits 0
cas_misses 0
cmd_flush 0
cmd_get 0
cmd_set 2096145
conn_yields 186
connection_structures 235
curr_connections 54
curr_items 1955671
curr_items_tot 3048157
daemon_connections 10
decr_hits 0
decr_misses 0
delete_hits 0
delete_misses 0
ep_bg_fetched 0
ep_commit_num 2870
ep_commit_time 1
ep_commit_time_total 979
ep_data_age 105
ep_data_age_highwat 495
ep_db_cleaner_status complete
ep_db_strategy multiMTDB
ep_dbinit 81
ep_dbname c:/Program Files/Membase/Server/data/ns_1/default
ep_dbshards 4
ep_expired 0
ep_flush_duration 152
ep_flush_duration_highwat 297
ep_flush_duration_total 1031
ep_flush_preempts 0
ep_flusher_state running
ep_flusher_todo 195647
ep_io_num_read 103220
ep_io_num_write 2761012
ep_io_read_bytes 146099644
ep_io_write_bytes 3907562418
ep_item_begin_failed 0
ep_item_commit_failed 0
ep_item_flush_expired 0
ep_item_flush_failed 0
ep_kv_size 2382041819
ep_max_data_size 3426746368
ep_max_txn_size 1000
ep_mem_high_wat 2570059776
ep_mem_low_wat 2056047820
ep_min_data_age 0
ep_num_active_non_resident 693389
ep_num_eject_failures 1297672
ep_num_eject_replicas 842900
ep_num_expiry_pager_runs 0
ep_num_non_resident 1582455
ep_num_not_my_vbuckets 27247
ep_num_pager_runs 5
ep_num_value_ejects 1582929
ep_oom_errors 0
ep_overhead 39522087
ep_pending_ops 0
ep_pending_ops_max 0
ep_pending_ops_max_duration 0
ep_pending_ops_total 0
ep_queue_age_cap 900
ep_queue_size 93028
ep_storage_age 105
ep_storage_age_highwat 492
ep_storage_type featured
ep_store_max_concurrency 10
ep_store_max_readers 9
ep_store_max_readwrite 1
ep_tap_bg_fetch_requeued 0
ep_tap_bg_fetched 103220
ep_tap_keepalive 0
ep_tmp_oom_errors 0
ep_too_old 0
ep_too_young 0
ep_total_cache_size 4598142419
ep_total_del_items 0
ep_total_enqueued 3049779
ep_total_new_items 2759938
ep_total_persisted 2761011
ep_vbucket_del 512
ep_vbucket_del_avg_walltime 118096
ep_vbucket_del_fail 0
ep_vbucket_del_max_walltime 140400
ep_vbucket_del_total_walltime 60465600
ep_version 1.6.2
ep_warmed_up 0
ep_warmup true
ep_warmup_dups 0
ep_warmup_oom 0
ep_warmup_thread complete
ep_warmup_time 31200
get_hits 0
get_misses 0
incr_hits 0
incr_misses 0
libevent 2.0.7-rc
limit_maxbytes 67108864
mem_used 2421563906
pid 2160
pointer_size 64
rejected_conns 0
tap_connect_received 544
tap_mutation_received 1574844
tap_mutation_sent 2341061
tap_opaque_received 513
tap_opaque_sent 1056
tap_vbucket_set_sent 1085
threads 4
time 1294089351
total_connections 879
uptime 1365
version 1.4.4_304_g7d5a132

INFO REPORT <0.6106.0> 2011-01-03 13:15:53
===============================================================================
vbucketmigrator<0.6106.0>: Bucket 542 moved to the next server
vbucketmigrator<0.6106.0>: Validate bucket states
vbucketmigrator<0.6106.0>: 542 ok

INFO REPORT <0.6121.0> 2011-01-03 13:15:54
===============================================================================
vbucketmigrator<0.6121.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6121.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6121.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6121.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6121.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6121.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6121.0>: Starting to move bucket 543

INFO REPORT <11993.4217.0> 2011-01-03 13:15:55
===============================================================================
vbucketmigrator<0.4217.0>: Bucket 17 moved to the next server
vbucketmigrator<0.4217.0>: Validate bucket states
vbucketmigrator<0.4217.0>: 17 ok

INFO REPORT <11993.4237.0> 2011-01-03 13:15:56
===============================================================================
vbucketmigrator<0.4237.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4237.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4237.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4237.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4237.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4237.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4237.0>: Starting to move bucket 18

INFO REPORT <0.6121.0> 2011-01-03 13:15:57
===============================================================================
vbucketmigrator<0.6121.0>: Bucket 543 moved to the next server
vbucketmigrator<0.6121.0>: Validate bucket states
vbucketmigrator<0.6121.0>: 543 ok
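The two "Stats for bucket" snapshots (memcached epoch time 1294089250 and 1294089351, 101 seconds apart) can be differenced for rough write rates. A sketch using values copied from those two dumps; the rate arithmetic is mine, not server output:

snap0 = {"time": 1294089250, "cmd_set": 1824514,
         "ep_total_persisted": 2594463, "ep_flusher_todo": 112102}
snap1 = {"time": 1294089351, "cmd_set": 2096145,
         "ep_total_persisted": 2761011, "ep_flusher_todo": 195647}

dt = snap1["time"] - snap0["time"]  # 101 s
for k in ("cmd_set", "ep_total_persisted"):
    print(f"{k}: {(snap1[k] - snap0[k]) / dt:,.0f}/s")
print("flusher backlog grew by", snap1["ep_flusher_todo"] - snap0["ep_flusher_todo"])

Roughly 2,700 sets/s are arriving while only about 1,650 items/s are being persisted, and the flusher backlog grew by ~83,500 items over the interval; ep_num_not_my_vbuckets also jumped from 3005 to 27247, consistent with clients still addressing vbuckets that have already moved.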
[['ns_1@10.2.1.102'| 0.14956011730205276]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.6140.0> 2011-01-03 13:15:58 =============================================================================== vbucketmigrator<0.6140.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.6140.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6140.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6140.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.6140.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.6140.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.6140.0>: Starting to move bucket 544 INFO REPORT <0.6140.0> 2011-01-03 13:16:01 =============================================================================== vbucketmigrator<0.6140.0>: Bucket 544 moved to the next server vbucketmigrator<0.6140.0>: Validate bucket states vbucketmigrator<0.6140.0>: 544 ok INFO REPORT <0.6158.0> 2011-01-03 13:16:02 =============================================================================== vbucketmigrator<0.6158.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.6158.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6158.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6158.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.6158.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.6158.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.6158.0>: Starting to move bucket 545 INFO REPORT <11993.4237.0> 2011-01-03 13:16:04 =============================================================================== vbucketmigrator<0.4237.0>: Bucket 18 moved to the next server vbucketmigrator<0.4237.0>: Validate bucket states vbucketmigrator<0.4237.0>: 18 ok INFO REPORT <11993.4254.0> 2011-01-03 13:16:05 =============================================================================== vbucketmigrator<0.4254.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.4254.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.4254.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.4254.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.4254.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.4254.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.4254.0>: Starting to move bucket 19 INFO REPORT <0.6158.0> 2011-01-03 13:16:05 =============================================================================== vbucketmigrator<0.6158.0>: Bucket 545 moved to the next server vbucketmigrator<0.6158.0>: Validate bucket states vbucketmigrator<0.6158.0>: 545 ok INFO REPORT <0.6169.0> 2011-01-03 13:16:06 =============================================================================== vbucketmigrator<0.6169.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.6169.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6169.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6169.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.6169.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.6169.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.6169.0>: Starting to move bucket 546 INFO REPORT <0.93.0> 2011-01-03 13:16:08 =============================================================================== ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', 
INFO REPORT <0.93.0> 2011-01-03 13:16:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100', [{last_heard,{1294,89367,407401}}, {active_buckets,["default"]}, {memory, [{total,18569624}, {processes,10834340}, {processes_used,10818252}, {system,7735284}, {atom,560301}, {atom_used,557531}, {binary,247760}, {code,4570913}, {ets,936524}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1379}, {memory_data,{4284698624,4211007488,{<0.299.0>,1457152}}}, {disk_data, [{"C:\\",48162864,57},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,57049088}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1370397,0}}, {context_switches,{436754,0}}, {garbage_collection,{82453,738599962,0}}, {io,{{input,29644371},{output,18238553}}}, {reductions,{400017618,2428152}}, {run_queue,1}, {runtime,{19266,110}}]}]},
 {'ns_1@10.2.1.101', [{last_heard,{1294,89368,374402}}, {active_buckets,["default"]}, {memory, [{total,14634784}, {processes,6817212}, {processes_used,6809012}, {system,7817572}, {atom,559813}, {atom_used,556363}, {binary,362392}, {code,4551541}, {ets,930980}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1310}, {memory_data,{4284698624,4239036416,{<11993.387.0>,1271680}}}, {disk_data, [{"C:\\",46243100,44},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,73408512}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1300897,0}}, {context_switches,{209781,0}}, {garbage_collection,{56964,445934330,0}}, {io,{{input,32340369},{output,14086893}}}, {reductions,{125037965,1079080}}, {run_queue,0}, {runtime,{10764,16}}]}]},
 {'ns_1@10.2.1.102', [{last_heard,{1294,89368,140400}}, {active_buckets,["default"]}, {memory, [{total,12469400}, {processes,5453204}, {processes_used,5439780}, {system,7016196}, {atom,541077}, {atom_used,528589}, {binary,309776}, {code,4280811}, {ets,472876}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,298}, {memory_data,{4284698624,889008128,{<10870.218.0>,1086308}}}, {disk_data, [{"C:\\",49423972,36},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,3264667648}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{290318,0}}, {context_switches,{36411,0}}, {garbage_collection,{8006,31408735,0}}, {io,{{input,6606309},{output,3222106}}}, {reductions,{13221532,586274}}, {run_queue,0}, {runtime,{2230,62}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:16:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.19883040935672514]], [['ns_1@10.2.1.101'| 0.11764705882352944]], [['ns_1@10.2.1.102'| 0.15835777126099704]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.6169.0> 2011-01-03 13:16:09
===============================================================================
vbucketmigrator<0.6169.0>: Bucket 546 moved to the next server
vbucketmigrator<0.6169.0>: Validate bucket states
vbucketmigrator<0.6169.0>: 546 ok

INFO REPORT <0.6185.0> 2011-01-03 13:16:10
===============================================================================
vbucketmigrator<0.6185.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6185.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6185.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6185.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6185.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6185.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6185.0>: Starting to move bucket 547

INFO REPORT <0.6185.0> 2011-01-03 13:16:12
===============================================================================
vbucketmigrator<0.6185.0>: Bucket 547 moved to the next server
vbucketmigrator<0.6185.0>: Validate bucket states
vbucketmigrator<0.6185.0>: 547 ok

INFO REPORT <0.85.0> 2011-01-03 13:16:13
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.6194.0> 2011-01-03 13:16:13
===============================================================================
vbucketmigrator<0.6194.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6194.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6194.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6194.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6194.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6194.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6194.0>: Starting to move bucket 548

INFO REPORT <11993.4254.0> 2011-01-03 13:16:16
===============================================================================
vbucketmigrator<0.4254.0>: Bucket 19 moved to the next server
vbucketmigrator<0.4254.0>: Validate bucket states
vbucketmigrator<0.4254.0>: 19 ok

INFO REPORT <11993.4296.0> 2011-01-03 13:16:18
===============================================================================
vbucketmigrator<0.4296.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4296.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4296.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4296.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4296.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4296.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4296.0>: Starting to move bucket 20

INFO REPORT <0.110.0> 2011-01-03 13:16:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.216374269005848]], [['ns_1@10.2.1.101'| 0.11764705882352944]], [['ns_1@10.2.1.102'| 0.1671554252199413]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.6194.0> 2011-01-03 13:16:25
===============================================================================
vbucketmigrator<0.6194.0>: Bucket 548 moved to the next server
vbucketmigrator<0.6194.0>: Validate bucket states
vbucketmigrator<0.6194.0>: 548 ok
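Every vbucket transfer above follows the same fixed sequence: connect and authenticate to the source (10.2.1.102:11210), connect and authenticate to the destination, "Starting to move bucket N", and finally "Bucket N moved to the next server" plus a state validation. The per-bucket wall-clock cost can be read straight off the timestamps: bucket 544 took about 3 s (13:15:58 to 13:16:01), bucket 548 about 12 s (13:16:13 to 13:16:25). A rough sketch of pairing the start/finish lines, assuming the log has already been parsed into {Seconds, start | done, VBucket} tuples (this helper is hypothetical, not part of the product):

    %% Hypothetical helper: per-vbucket move durations from parsed log events.
    %% Events :: [{Seconds :: integer(), start | done, VBucket :: integer()}]
    move_durations(Events) ->
        Starts = dict:from_list([{VB, T} || {T, start, VB} <- Events]),
        [{VB, TDone - dict:fetch(VB, Starts)} || {TDone, done, VB} <- Events].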
INFO REPORT <0.6238.0> 2011-01-03 13:16:27
===============================================================================
vbucketmigrator<0.6238.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6238.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6238.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6238.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6238.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6238.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6238.0>: Starting to move bucket 549

INFO REPORT <0.110.0> 2011-01-03 13:16:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.216374269005848]], [['ns_1@10.2.1.101'| 0.12352941176470589]], [['ns_1@10.2.1.102'| 0.1700879765395894]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.4296.0> 2011-01-03 13:16:28
===============================================================================
vbucketmigrator<0.4296.0>: Bucket 20 moved to the next server
vbucketmigrator<0.4296.0>: Validate bucket states
vbucketmigrator<0.4296.0>: 20 ok

INFO REPORT <0.6238.0> 2011-01-03 13:16:30
===============================================================================
vbucketmigrator<0.6238.0>: Bucket 549 moved to the next server
vbucketmigrator<0.6238.0>: Validate bucket states
vbucketmigrator<0.6238.0>: 549 ok

INFO REPORT <11993.4330.0> 2011-01-03 13:16:31
===============================================================================
vbucketmigrator<0.4330.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4330.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4330.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4330.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4330.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4330.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4330.0>: Starting to move bucket 21

INFO REPORT <0.6258.0> 2011-01-03 13:16:31
===============================================================================
vbucketmigrator<0.6258.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6258.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6258.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6258.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6258.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6258.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6258.0>: Starting to move bucket 550

INFO REPORT <0.6258.0> 2011-01-03 13:16:35
===============================================================================
vbucketmigrator<0.6258.0>: Bucket 550 moved to the next server
vbucketmigrator<0.6258.0>: Validate bucket states
vbucketmigrator<0.6258.0>: 550 ok

INFO REPORT <0.6275.0> 2011-01-03 13:16:36
===============================================================================
vbucketmigrator<0.6275.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6275.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6275.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6275.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6275.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6275.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6275.0>: Starting to move bucket 551

INFO REPORT <0.110.0> 2011-01-03 13:16:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.22807017543859653]], [['ns_1@10.2.1.101'| 0.12941176470588234]], [['ns_1@10.2.1.102'| 0.17888563049853368]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.6275.0> 2011-01-03 13:16:39
===============================================================================
vbucketmigrator<0.6275.0>: Bucket 551 moved to the next server
vbucketmigrator<0.6275.0>: Validate bucket states
vbucketmigrator<0.6275.0>: 551 ok

INFO REPORT <0.6294.0> 2011-01-03 13:16:40
===============================================================================
vbucketmigrator<0.6294.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6294.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6294.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6294.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6294.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6294.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6294.0>: Starting to move bucket 552

INFO REPORT <11993.4330.0> 2011-01-03 13:16:41
===============================================================================
vbucketmigrator<0.4330.0>: Bucket 21 moved to the next server
vbucketmigrator<0.4330.0>: Validate bucket states
vbucketmigrator<0.4330.0>: 21 ok

INFO REPORT <11993.4355.0> 2011-01-03 13:16:44
===============================================================================
vbucketmigrator<0.4355.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4355.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4355.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4355.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4355.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4355.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4355.0>: Starting to move bucket 22

INFO REPORT <0.6294.0> 2011-01-03 13:16:44
===============================================================================
vbucketmigrator<0.6294.0>: Bucket 552 moved to the next server
vbucketmigrator<0.6294.0>: Validate bucket states
vbucketmigrator<0.6294.0>: 552 ok

INFO REPORT <0.6311.0> 2011-01-03 13:16:45
===============================================================================
vbucketmigrator<0.6311.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6311.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6311.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6311.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6311.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6311.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6311.0>: Starting to move bucket 553

INFO REPORT <0.110.0> 2011-01-03 13:16:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.23976608187134507]], [['ns_1@10.2.1.101'| 0.1352941176470588]], [['ns_1@10.2.1.102'| 0.18768328445747806]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.6311.0> 2011-01-03 13:16:48
===============================================================================
vbucketmigrator<0.6311.0>: Bucket 553 moved to the next server
vbucketmigrator<0.6311.0>: Validate bucket states
vbucketmigrator<0.6311.0>: 553 ok

INFO REPORT <0.6380.0> 2011-01-03 13:16:49
===============================================================================
vbucketmigrator<0.6380.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6380.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6380.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6380.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6380.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6380.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6380.0>: Starting to move bucket 554

INFO REPORT <0.85.0> 2011-01-03 13:16:50
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.6380.0> 2011-01-03 13:16:53
===============================================================================
vbucketmigrator<0.6380.0>: Bucket 554 moved to the next server
vbucketmigrator<0.6380.0>: Validate bucket states
vbucketmigrator<0.6380.0>: 554 ok

INFO REPORT <0.6395.0> 2011-01-03 13:16:54
===============================================================================
vbucketmigrator<0.6395.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6395.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6395.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6395.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6395.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6395.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6395.0>: Starting to move bucket 555

INFO REPORT <11993.4355.0> 2011-01-03 13:16:55
===============================================================================
vbucketmigrator<0.4355.0>: Bucket 22 moved to the next server
vbucketmigrator<0.4355.0>: Validate bucket states
vbucketmigrator<0.4355.0>: 22 ok

INFO REPORT <0.110.0> 2011-01-03 13:16:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.2573099415204678]], [['ns_1@10.2.1.101'| 0.1352941176470588]], [['ns_1@10.2.1.102'| 0.19648093841642233]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.6395.0> 2011-01-03 13:16:58
===============================================================================
vbucketmigrator<0.6395.0>: Bucket 555 moved to the next server
vbucketmigrator<0.6395.0>: Validate bucket states
vbucketmigrator<0.6395.0>: 555 ok

INFO REPORT <11993.4395.0> 2011-01-03 13:16:58
===============================================================================
vbucketmigrator<0.4395.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4395.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4395.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4395.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4395.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4395.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4395.0>: Starting to move bucket 23

INFO REPORT <0.6415.0> 2011-01-03 13:16:59
===============================================================================
vbucketmigrator<0.6415.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6415.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6415.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6415.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6415.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6415.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6415.0>: Starting to move bucket 556

INFO REPORT <0.85.0> 2011-01-03 13:17:02
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <11993.4395.0> 2011-01-03 13:17:04
===============================================================================
vbucketmigrator<0.4395.0>: Bucket 23 moved to the next server
vbucketmigrator<0.4395.0>: Validate bucket states
vbucketmigrator<0.4395.0>: 23 ok

INFO REPORT <0.6415.0> 2011-01-03 13:17:04
===============================================================================
vbucketmigrator<0.6415.0>: Bucket 556 moved to the next server
vbucketmigrator<0.6415.0>: Validate bucket states
vbucketmigrator<0.6415.0>: 556 ok

INFO REPORT <11993.4416.0> 2011-01-03 13:17:05
===============================================================================
vbucketmigrator<0.4416.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4416.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4416.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4416.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4416.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4416.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4416.0>: Starting to move bucket 24

INFO REPORT <0.6435.0> 2011-01-03 13:17:05
===============================================================================
vbucketmigrator<0.6435.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6435.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6435.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6435.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6435.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6435.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6435.0>: Starting to move bucket 557

INFO REPORT <0.93.0> 2011-01-03 13:17:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100', [{last_heard,{1294,89427,405401}}, {active_buckets,["default"]}, {memory, [{total,22045192}, {processes,14284220}, {processes_used,14271828}, {system,7760972}, {atom,560301}, {atom_used,557531}, {binary,236992}, {code,4570913}, {ets,970516}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1439}, {memory_data,{4284698624,4247453696,{<0.6124.0>,1086328}}}, {disk_data, [{"C:\\",48162864,58},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,180518912}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1430395,0}}, {context_switches,{456108,0}}, {garbage_collection,{86309,791211991,0}}, {io,{{input,32698964},{output,18977842}}}, {reductions,{413320106,2705051}}, {run_queue,0}, {runtime,{19968,125}}]}]},
 {'ns_1@10.2.1.101', [{last_heard,{1294,89428,372404}}, {active_buckets,["default"]}, {memory, [{total,16113552}, {processes,8262588}, {processes_used,8253908}, {system,7850964}, {atom,559813}, {atom_used,556363}, {binary,359608}, {code,4551541}, {ets,966940}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1370}, {memory_data,{4284698624,4231671808,{<11993.298.0>,1271780}}}, {disk_data, [{"C:\\",46243100,44},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,62201856}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1360895,0}}, {context_switches,{221828,0}}, {garbage_collection,{59647,473170613,0}}, {io,{{input,33842287},{output,15239003}}}, {reductions,{133208573,1086888}}, {run_queue,0}, {runtime,{11169,15}}]}]},
 {'ns_1@10.2.1.102', [{last_heard,{1294,89428,138400}}, {active_buckets,["default"]}, {memory, [{total,12825544}, {processes,5769972}, {processes_used,5756548}, {system,7055572}, {atom,541077}, {atom_used,528589}, {binary,315160}, {code,4280811}, {ets,506868}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,358}, {memory_data,{4284698624,1048477696,{<10870.218.0>,1271780}}}, {disk_data, [{"C:\\",49423972,37},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,3133894656}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{350316,0}}, {context_switches,{42289,0}}, {garbage_collection,{9665,39608350,0}}, {io,{{input,6877760},{output,3492199}}}, {reductions,{16855352,595450}}, {run_queue,0}, {runtime,{2496,94}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:17:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.26315789473684215]], [['ns_1@10.2.1.101'| 0.1470588235294118]], [['ns_1@10.2.1.102'| 0.2052785923753666]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.85.0> 2011-01-03 13:17:08
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <11993.4416.0> 2011-01-03 13:17:09
===============================================================================
vbucketmigrator<0.4416.0>: Bucket 24 moved to the next server
vbucketmigrator<0.4416.0>: Validate bucket states
vbucketmigrator<0.4416.0>: 24 ok
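The two ns_doctor dumps so far, taken a minute apart, show how unevenly memory sits during this rebalance: ns_1@10.2.1.100 reports free_memory 180518912 out of total_memory 4284698624 (about 4% free), while the freshly added ns_1@10.2.1.102 still has about 3.1 GB free (roughly 73%). A one-liner to pull that ratio out of the system_memory_data proplist (shape as logged above; the function name is ours, not ns_server's):

    %% Free physical memory as a percentage, from a node's system_memory_data
    %% proplist as it appears in the ns_doctor dumps above.
    free_pct(SysMem) ->
        100 * proplists:get_value(free_memory, SysMem)
            / proplists:get_value(total_memory, SysMem).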
INFO REPORT <0.6435.0> 2011-01-03 13:17:09
===============================================================================
vbucketmigrator<0.6435.0>: Bucket 557 moved to the next server
vbucketmigrator<0.6435.0>: Validate bucket states
vbucketmigrator<0.6435.0>: 557 ok

INFO REPORT <11993.4431.0> 2011-01-03 13:17:10
===============================================================================
vbucketmigrator<0.4431.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4431.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4431.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4431.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4431.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4431.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4431.0>: Starting to move bucket 25

INFO REPORT <0.6470.0> 2011-01-03 13:17:10
===============================================================================
vbucketmigrator<0.6470.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6470.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6470.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6470.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6470.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6470.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6470.0>: Starting to move bucket 558

INFO REPORT <11993.4431.0> 2011-01-03 13:17:15
===============================================================================
vbucketmigrator<0.4431.0>: Bucket 25 moved to the next server
vbucketmigrator<0.4431.0>: Validate bucket states
vbucketmigrator<0.4431.0>: 25 ok

INFO REPORT <0.6470.0> 2011-01-03 13:17:15
===============================================================================
vbucketmigrator<0.6470.0>: Bucket 558 moved to the next server
vbucketmigrator<0.6470.0>: Validate bucket states
vbucketmigrator<0.6470.0>: 558 ok

INFO REPORT <11993.4443.0> 2011-01-03 13:17:16
===============================================================================
vbucketmigrator<0.4443.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4443.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4443.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4443.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4443.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4443.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4443.0>: Starting to move bucket 26

INFO REPORT <0.6488.0> 2011-01-03 13:17:16
===============================================================================
vbucketmigrator<0.6488.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6488.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6488.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6488.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6488.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6488.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6488.0>: Starting to move bucket 559

INFO REPORT <0.110.0> 2011-01-03 13:17:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.2748538011695907]], [['ns_1@10.2.1.101'| 0.1588235294117647]], [['ns_1@10.2.1.102'| 0.21700879765395897]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.6488.0> 2011-01-03 13:17:21
===============================================================================
vbucketmigrator<0.6488.0>: Bucket 559 moved to the next server
vbucketmigrator<0.6488.0>: Validate bucket states
vbucketmigrator<0.6488.0>: 559 ok

INFO REPORT <0.6508.0> 2011-01-03 13:17:22
===============================================================================
vbucketmigrator<0.6508.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6508.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6508.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6508.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6508.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6508.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6508.0>: Starting to move bucket 560

INFO REPORT <11993.4443.0> 2011-01-03 13:17:22
===============================================================================
vbucketmigrator<0.4443.0>: Bucket 26 moved to the next server
vbucketmigrator<0.4443.0>: Validate bucket states
vbucketmigrator<0.4443.0>: 26 ok

INFO REPORT <11993.4461.0> 2011-01-03 13:17:23
===============================================================================
vbucketmigrator<0.4461.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4461.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4461.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4461.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4461.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4461.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4461.0>: Starting to move bucket 27

INFO REPORT <11993.4461.0> 2011-01-03 13:17:27
===============================================================================
vbucketmigrator<0.4461.0>: Bucket 27 moved to the next server
vbucketmigrator<0.4461.0>: Validate bucket states
vbucketmigrator<0.4461.0>: 27 ok

INFO REPORT <11993.4473.0> 2011-01-03 13:17:28
===============================================================================
vbucketmigrator<0.4473.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4473.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4473.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4473.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4473.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4473.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4473.0>: Starting to move bucket 28

INFO REPORT <0.6508.0> 2011-01-03 13:17:28
===============================================================================
vbucketmigrator<0.6508.0>: Bucket 560 moved to the next server
vbucketmigrator<0.6508.0>: Validate bucket states
vbucketmigrator<0.6508.0>: 560 ok

INFO REPORT <0.110.0> 2011-01-03 13:17:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.2865497076023392]], [['ns_1@10.2.1.101'| 0.1705882352941176]], [['ns_1@10.2.1.102'| 0.22873900293255134]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}
INFO REPORT <0.6522.0> 2011-01-03 13:17:29
===============================================================================
vbucketmigrator<0.6522.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6522.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6522.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6522.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6522.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6522.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6522.0>: Starting to move bucket 561

INFO REPORT <0.259.0> 2011-01-03 13:17:31
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 564 auth_errors 0 bucket_conns 92 bytes_read 5615845497 bytes_written 98293100 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 2295899 conn_yields 186 connection_structures 235 curr_connections 114 curr_items 2040276 curr_items_tot 3212841 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0
ep_bg_fetched 0 ep_commit_num 2969 ep_commit_time 1 ep_commit_time_total 1022 ep_data_age 180 ep_data_age_highwat 495 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 152 ep_flush_duration_highwat 297 ep_flush_duration_total 1031 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 98456
ep_io_num_read 171948 ep_io_num_write 2858201 ep_io_read_bytes 243384008 ep_io_write_bytes 4045228842 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 2606631718 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0
ep_num_active_non_resident 665347 ep_num_eject_failures 1363215 ep_num_eject_replicas 853756 ep_num_expiry_pager_runs 0 ep_num_non_resident 1599733 ep_num_not_my_vbuckets 61975 ep_num_pager_runs 6 ep_num_value_ejects 1600330 ep_oom_errors 0 ep_overhead 42808357 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0
ep_queue_age_cap 900 ep_queue_size 257995 ep_storage_age 180 ep_storage_age_highwat 492 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 171948 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 4847093718 ep_total_del_items 0 ep_total_enqueued 3214746 ep_total_new_items 2856980 ep_total_persisted 2858200
ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200
get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2649440075 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 563 tap_mutation_received 1574844 tap_mutation_sent 2461933 tap_opaque_received 513 tap_opaque_sent 1075 tap_vbucket_set_sent 1123 threads 4 time 1294089451 total_connections 958 uptime 1465 version 1.4.4_304_g7d5a132
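The stats block above is internally consistent with the usual ep-engine watermark ratios: ep_mem_high_wat 2570059776 is exactly 75% of ep_max_data_size 3426746368, and ep_mem_low_wat 2056047820 is 60% of it (truncated). Note also that mem_used 2649440075 already sits above the high watermark, which lines up with the heavy ejection counters (ep_num_value_ejects 1600330, ep_num_active_non_resident 665347). A quick Erlang-shell check, assuming those stock ratios:

    %% Sanity check: the stock 75% / 60% watermark ratios reproduce the
    %% logged values exactly (all figures copied from the stats dump above).
    MaxSize = 3426746368.
    MaxSize * 3 div 4.     %% 2570059776 = ep_mem_high_wat
    trunc(MaxSize * 0.6).  %% 2056047820 = ep_mem_low_wat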
INFO REPORT <11993.4473.0> 2011-01-03 13:17:33
===============================================================================
vbucketmigrator<0.4473.0>: Bucket 28 moved to the next server
vbucketmigrator<0.4473.0>: Validate bucket states
vbucketmigrator<0.4473.0>: 28 ok

INFO REPORT <11993.4487.0> 2011-01-03 13:17:34
===============================================================================
vbucketmigrator<0.4487.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4487.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4487.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4487.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4487.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4487.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4487.0>: Starting to move bucket 29

INFO REPORT <0.6522.0> 2011-01-03 13:17:34
===============================================================================
vbucketmigrator<0.6522.0>: Bucket 561 moved to the next server
vbucketmigrator<0.6522.0>: Validate bucket states
vbucketmigrator<0.6522.0>: 561 ok

INFO REPORT <0.6538.0> 2011-01-03 13:17:35
===============================================================================
vbucketmigrator<0.6538.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6538.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6538.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6538.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6538.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6538.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6538.0>: Starting to move bucket 562

INFO REPORT <0.110.0> 2011-01-03 13:17:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.29239766081871343]], [['ns_1@10.2.1.101'| 0.17647058823529416]], [['ns_1@10.2.1.102'| 0.23460410557184752]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.4487.0> 2011-01-03 13:17:39
===============================================================================
vbucketmigrator<0.4487.0>: Bucket 29 moved to the next server
vbucketmigrator<0.4487.0>: Validate bucket states
vbucketmigrator<0.4487.0>: 29 ok

INFO REPORT <0.6369.0> 2011-01-03 13:17:39
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <11993.4504.0> 2011-01-03 13:17:40
===============================================================================
vbucketmigrator<0.4504.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4504.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4504.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4504.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4504.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4504.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4504.0>: Starting to move bucket 30

INFO REPORT <0.6359.0> 2011-01-03 13:17:40
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.6350.0> 2011-01-03 13:17:40
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.6337.0> 2011-01-03 13:17:40
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.5711.0> 2011-01-03 13:17:41
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.5688.0> 2011-01-03 13:17:42
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.6538.0> 2011-01-03 13:17:42
===============================================================================
vbucketmigrator<0.6538.0>: Bucket 562 moved to the next server
vbucketmigrator<0.6538.0>: Validate bucket states
vbucketmigrator<0.6538.0>: 562 ok

INFO REPORT <0.6327.0> 2011-01-03 13:17:42
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.6560.0> 2011-01-03 13:17:43
===============================================================================
vbucketmigrator<0.6560.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6560.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6560.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6560.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6560.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6560.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6560.0>: Starting to move bucket 563

INFO REPORT <0.5649.0> 2011-01-03 13:17:43
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <11993.4504.0> 2011-01-03 13:17:45
===============================================================================
vbucketmigrator<0.4504.0>: Bucket 30 moved to the next server
vbucketmigrator<0.4504.0>: Validate bucket states
vbucketmigrator<0.4504.0>: 30 ok

INFO REPORT <0.6317.0> 2011-01-03 13:17:45
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <11993.4516.0> 2011-01-03 13:17:46
===============================================================================
vbucketmigrator<0.4516.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4516.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4516.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4516.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4516.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4516.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4516.0>: Starting to move bucket 31

INFO REPORT <0.110.0> 2011-01-03 13:17:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.30409356725146197]], [['ns_1@10.2.1.101'| 0.1823529411764706]], [['ns_1@10.2.1.102'| 0.2434017595307918]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.6560.0> 2011-01-03 13:17:48
===============================================================================
vbucketmigrator<0.6560.0>: Bucket 563 moved to the next server
vbucketmigrator<0.6560.0>: Validate bucket states
vbucketmigrator<0.6560.0>: 563 ok

INFO REPORT <0.6576.0> 2011-01-03 13:17:49
===============================================================================
vbucketmigrator<0.6576.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6576.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6576.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6576.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6576.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6576.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6576.0>: Starting to move bucket 564

INFO REPORT <0.85.0> 2011-01-03 13:17:50
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <11993.4516.0> 2011-01-03 13:17:50
===============================================================================
vbucketmigrator<0.4516.0>: Bucket 31 moved to the next server
vbucketmigrator<0.4516.0>: Validate bucket states
vbucketmigrator<0.4516.0>: 31 ok

INFO REPORT <11993.4529.0> 2011-01-03 13:17:51
===============================================================================
vbucketmigrator<0.4529.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4529.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4529.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4529.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4529.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4529.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4529.0>: Starting to move bucket 32

INFO REPORT <0.6576.0> 2011-01-03 13:17:55
===============================================================================
vbucketmigrator<0.6576.0>: Bucket 564 moved to the next server
vbucketmigrator<0.6576.0>: Validate bucket states
vbucketmigrator<0.6576.0>: 564 ok

INFO REPORT <0.6594.0> 2011-01-03 13:17:56
===============================================================================
vbucketmigrator<0.6594.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6594.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6594.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6594.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6594.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6594.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6594.0>: Starting to move bucket 565

INFO REPORT <0.110.0> 2011-01-03 13:17:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.3099415204678363]], [['ns_1@10.2.1.101'| 0.1941176470588235]], [['ns_1@10.2.1.102'| 0.25219941348973607]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.4529.0> 2011-01-03 13:18:01
===============================================================================
vbucketmigrator<0.4529.0>: Bucket 32 moved to the next server
vbucketmigrator<0.4529.0>: Validate bucket states
vbucketmigrator<0.4529.0>: 32 ok

INFO REPORT <0.6594.0> 2011-01-03 13:18:01
===============================================================================
vbucketmigrator<0.6594.0>: Bucket 565 moved to the next server
vbucketmigrator<0.6594.0>: Validate bucket states
vbucketmigrator<0.6594.0>: 565 ok
INFO REPORT <11993.4554.0> 2011-01-03 13:18:02
===============================================================================
vbucketmigrator<0.4554.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4554.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4554.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4554.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4554.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4554.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4554.0>: Starting to move bucket 33

INFO REPORT <0.6619.0> 2011-01-03 13:18:03
===============================================================================
vbucketmigrator<0.6619.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6619.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6619.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6619.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6619.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6619.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6619.0>: Starting to move bucket 566

INFO REPORT <0.93.0> 2011-01-03 13:18:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100', [{last_heard,{1294,89487,403401}}, {active_buckets,["default"]}, {memory, [{total,15787488}, {processes,8005596}, {processes_used,7987660}, {system,7781892}, {atom,560301}, {atom_used,557531}, {binary,226208}, {code,4570913}, {ets,1006164}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1499}, {memory_data,{4284698624,4119662592,{<0.6280.0>,971844}}}, {disk_data, [{"C:\\",48162864,58},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,114319360}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1490393,0}}, {context_switches,{473532,0}}, {garbage_collection,{89730,835911195,0}}, {io,{{input,35165491},{output,20366192}}}, {reductions,{425340532,2456026}}, {run_queue,0}, {runtime,{20592,172}}]}]},
 {'ns_1@10.2.1.101', [{last_heard,{1294,89488,370402}}, {active_buckets,["default"]}, {memory, [{total,16778080}, {processes,8904724}, {processes_used,8896044}, {system,7873356}, {atom,559813}, {atom_used,556363}, {binary,382992}, {code,4551541}, {ets,965452}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1430}, {memory_data,{4284698624,4227035136,{<11993.387.0>,1757252}}}, {disk_data, [{"C:\\",46243100,44},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,71917568}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1420893,0}}, {context_switches,{233433,0}}, {garbage_collection,{62108,499815936,0}}, {io,{{input,34590320},{output,15662273}}}, {reductions,{141336103,1091038}}, {run_queue,0}, {runtime,{11528,47}}]}]},
 {'ns_1@10.2.1.102', [{last_heard,{1294,89488,136400}}, {active_buckets,["default"]}, {memory, [{total,12822632}, {processes,5725116}, {processes_used,5711692}, {system,7097516}, {atom,541077}, {atom_used,528589}, {binary,319832}, {code,4280811}, {ets,544364}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,418}, {memory_data,{4284698624,1177591808,{<10870.218.0>,1271780}}}, {disk_data, [{"C:\\",49423972,37},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,2987933696}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{410314,0}}, {context_switches,{48202,0}}, {garbage_collection,{11380,47694527,0}}, {io,{{input,7874437},{output,3994656}}}, {reductions,{20440766,598858}}, {run_queue,0}, {runtime,{2792,31}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:18:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.3157894736842105]], [['ns_1@10.2.1.101'| 0.19999999999999996]], [['ns_1@10.2.1.102'| 0.25806451612903225]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.6619.0> 2011-01-03 13:18:08
===============================================================================
vbucketmigrator<0.6619.0>: Bucket 566 moved to the next server
vbucketmigrator<0.6619.0>: Validate bucket states
vbucketmigrator<0.6619.0>: 566 ok

INFO REPORT <0.6640.0> 2011-01-03 13:18:09
===============================================================================
vbucketmigrator<0.6640.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6640.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6640.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6640.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6640.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6640.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6640.0>: Starting to move bucket 567

INFO REPORT <11993.4554.0> 2011-01-03 13:18:11
===============================================================================
vbucketmigrator<0.4554.0>: Bucket 33 moved to the next server
vbucketmigrator<0.4554.0>: Validate bucket states
vbucketmigrator<0.4554.0>: 33 ok

INFO REPORT <11993.4575.0> 2011-01-03 13:18:13
===============================================================================
vbucketmigrator<0.4575.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4575.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4575.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4575.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4575.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4575.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4575.0>: Starting to move bucket 34

INFO REPORT <0.6640.0> 2011-01-03 13:18:14
===============================================================================
vbucketmigrator<0.6640.0>: Bucket 567 moved to the next server
vbucketmigrator<0.6640.0>: Validate bucket states
vbucketmigrator<0.6640.0>: 567 ok
Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.6666.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6666.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6666.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.6666.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.6666.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.6666.0>: Starting to move bucket 568 INFO REPORT <0.110.0> 2011-01-03 13:18:18 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.32748538011695905]], [['ns_1@10.2.1.101'| 0.20588235294117652]], [['ns_1@10.2.1.102'| 0.2668621700879765]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <11993.4575.0> 2011-01-03 13:18:20 =============================================================================== vbucketmigrator<0.4575.0>: Bucket 34 moved to the next server vbucketmigrator<0.4575.0>: Validate bucket states vbucketmigrator<0.4575.0>: 34 ok INFO REPORT <0.6666.0> 2011-01-03 13:18:21 =============================================================================== vbucketmigrator<0.6666.0>: Bucket 568 moved to the next server vbucketmigrator<0.6666.0>: Validate bucket states vbucketmigrator<0.6666.0>: 568 ok INFO REPORT <11993.4603.0> 2011-01-03 13:18:22 =============================================================================== vbucketmigrator<0.4603.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.4603.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.4603.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.4603.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.4603.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.4603.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.4603.0>: Starting to move bucket 35 INFO REPORT <0.6691.0> 2011-01-03 13:18:22 =============================================================================== vbucketmigrator<0.6691.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.6691.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6691.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6691.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.6691.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.6691.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.6691.0>: Starting to move bucket 569 INFO REPORT <0.6691.0> 2011-01-03 13:18:27 =============================================================================== vbucketmigrator<0.6691.0>: Bucket 569 moved to the next server vbucketmigrator<0.6691.0>: Validate bucket states vbucketmigrator<0.6691.0>: 569 ok INFO REPORT <0.6711.0> 2011-01-03 13:18:28 =============================================================================== vbucketmigrator<0.6711.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.6711.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6711.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6711.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.6711.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.6711.0>: Authenticated towards: {Sock 10.2.1.100:11210} 
vbucketmigrator<0.6711.0>: Starting to move bucket 570

INFO REPORT <0.110.0> 2011-01-03 13:18:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.3391812865497076]], [['ns_1@10.2.1.101'| 0.21176470588235297]], [['ns_1@10.2.1.102'| 0.2756598240469208]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.4603.0> 2011-01-03 13:18:30
===============================================================================
vbucketmigrator<0.4603.0>: Bucket 35 moved to the next server
vbucketmigrator<0.4603.0>: Validate bucket states
vbucketmigrator<0.4603.0>: 35 ok

INFO REPORT <11993.4623.0> 2011-01-03 13:18:31
===============================================================================
vbucketmigrator<0.4623.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4623.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4623.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4623.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4623.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4623.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4623.0>: Starting to move bucket 36

INFO REPORT <0.6711.0> 2011-01-03 13:18:33
===============================================================================
vbucketmigrator<0.6711.0>: Bucket 570 moved to the next server
vbucketmigrator<0.6711.0>: Validate bucket states
vbucketmigrator<0.6711.0>: 570 ok

INFO REPORT <0.6733.0> 2011-01-03 13:18:34
===============================================================================
vbucketmigrator<0.6733.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6733.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6733.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6733.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6733.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6733.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6733.0>: Starting to move bucket 571

INFO REPORT <0.110.0> 2011-01-03 13:18:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.3450292397660819]], [['ns_1@10.2.1.101'| 0.21764705882352942]], [['ns_1@10.2.1.102'| 0.281524926686217]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.4623.0> 2011-01-03 13:18:38
===============================================================================
vbucketmigrator<0.4623.0>: Bucket 36 moved to the next server
vbucketmigrator<0.4623.0>: Validate bucket states
vbucketmigrator<0.4623.0>: 36 ok

INFO REPORT <0.6733.0> 2011-01-03 13:18:39
===============================================================================
vbucketmigrator<0.6733.0>: Bucket 571 moved to the next server
vbucketmigrator<0.6733.0>: Validate bucket states
vbucketmigrator<0.6733.0>: 571 ok

INFO REPORT <11993.4645.0> 2011-01-03 13:18:40
===============================================================================
vbucketmigrator<0.4645.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4645.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4645.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4645.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4645.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4645.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4645.0>: Starting to move bucket 37

INFO REPORT <0.6759.0> 2011-01-03 13:18:40
===============================================================================
vbucketmigrator<0.6759.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6759.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6759.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6759.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6759.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6759.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6759.0>: Starting to move bucket 572

INFO REPORT <0.85.0> 2011-01-03 13:18:41
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.6759.0> 2011-01-03 13:18:46
===============================================================================
vbucketmigrator<0.6759.0>: Bucket 572 moved to the next server
vbucketmigrator<0.6759.0>: Validate bucket states
vbucketmigrator<0.6759.0>: 572 ok

INFO REPORT <0.6777.0> 2011-01-03 13:18:47
===============================================================================
vbucketmigrator<0.6777.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6777.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6777.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6777.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6777.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6777.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6777.0>: Starting to move bucket 573

INFO REPORT <0.110.0> 2011-01-03 13:18:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.35672514619883045]], [['ns_1@10.2.1.101'| 0.22352941176470587]], [['ns_1@10.2.1.102'| 0.29032258064516125]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.4645.0> 2011-01-03 13:18:50
===============================================================================
vbucketmigrator<0.4645.0>: Bucket 37 moved to the next server
vbucketmigrator<0.4645.0>: Validate bucket states
vbucketmigrator<0.4645.0>: 37 ok

INFO REPORT <11993.4667.0> 2011-01-03 13:18:51
===============================================================================
vbucketmigrator<0.4667.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4667.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4667.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4667.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4667.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4667.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4667.0>: Starting to move bucket 38

INFO REPORT <0.6777.0> 2011-01-03 13:18:52
===============================================================================
vbucketmigrator<0.6777.0>: Bucket 573 moved to the next server
vbucketmigrator<0.6777.0>: Validate bucket states
vbucketmigrator<0.6777.0>: 573 ok

INFO REPORT <0.6797.0> 2011-01-03 13:18:53
===============================================================================
vbucketmigrator<0.6797.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6797.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6797.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6797.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6797.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6797.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6797.0>: Starting to move bucket 574

INFO REPORT <0.110.0> 2011-01-03 13:18:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.36257309941520466]], [['ns_1@10.2.1.101'| 0.22941176470588232]], [['ns_1@10.2.1.102'| 0.29618768328445744]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.6797.0> 2011-01-03 13:18:58
===============================================================================
vbucketmigrator<0.6797.0>: Bucket 574 moved to the next server
vbucketmigrator<0.6797.0>: Validate bucket states
vbucketmigrator<0.6797.0>: 574 ok

INFO REPORT <0.6825.0> 2011-01-03 13:18:59
===============================================================================
vbucketmigrator<0.6825.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6825.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6825.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6825.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6825.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6825.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6825.0>: Starting to move bucket 575

INFO REPORT <11993.4667.0> 2011-01-03 13:19:01
===============================================================================
vbucketmigrator<0.4667.0>: Bucket 38 moved to the next server
vbucketmigrator<0.4667.0>: Validate bucket states
vbucketmigrator<0.4667.0>: 38 ok

INFO REPORT <11993.4692.0> 2011-01-03 13:19:02
===============================================================================
vbucketmigrator<0.4692.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4692.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4692.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4692.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4692.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4692.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4692.0>: Starting to move bucket 39

INFO REPORT <0.6825.0> 2011-01-03 13:19:04
===============================================================================
vbucketmigrator<0.6825.0>: Bucket 575 moved to the next server
vbucketmigrator<0.6825.0>: Validate bucket states
vbucketmigrator<0.6825.0>: 575 ok

INFO REPORT <0.6843.0> 2011-01-03 13:19:05
===============================================================================
vbucketmigrator<0.6843.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6843.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6843.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6843.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6843.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6843.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6843.0>: Starting to move bucket 576

INFO REPORT <0.93.0> 2011-01-03 13:19:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100', [{last_heard,{1294,89547,401401}}, {active_buckets,["default"]}, {memory, [{total,16977304}, {processes,9136076}, {processes_used,9117636}, {system,7841228}, {atom,560301}, {atom_used,557531}, {binary,251608}, {code,4570913}, {ets,1040356}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1559}, {memory_data,{4284698624,4171427840,{<0.299.0>,900736}}}, {disk_data, [{"C:\\",48162864,58},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,95227904}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1550391,0}}, {context_switches,{490094,0}}, {garbage_collection,{92992,880361465,0}}, {io,{{input,38123990},{output,21019984}}}, {reductions,{436981104,2455416}}, {run_queue,0}, {runtime,{21075,109}}]}]},
 {'ns_1@10.2.1.101', [{last_heard,{1294,89548,368402}}, {active_buckets,["default"]}, {memory, [{total,15936640}, {processes,8065140}, {processes_used,8056460}, {system,7871500}, {atom,559813}, {atom_used,556363}, {binary,347240}, {code,4551541}, {ets,999468}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1490}, {memory_data,{4284698624,4177469440,{<11993.4379.0>,2357508}}}, {disk_data, [{"C:\\",46243100,44},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,95166464}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1480891,0}}, {context_switches,{244866,0}}, {garbage_collection,{64633,527152758,0}}, {io,{{input,35385924},{output,16079782}}}, {reductions,{149680716,1108159}}, {run_queue,0}, {runtime,{11731,0}}]}]},
 {'ns_1@10.2.1.102', [{last_heard,{1294,89548,134400}}, {active_buckets,["default"]}, {memory, [{total,12891104}, {processes,5783444}, {processes_used,5770020}, {system,7107660}, {atom,541077}, {atom_used,528589}, {binary,295936}, {code,4280811}, {ets,578396}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,478}, {memory_data,{4284698624,1317900288,{<10870.218.0>,1086308}}}, {disk_data, [{"C:\\",49423972,37},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,2876907520}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{470312,0}}, {context_switches,{53760,0}}, {garbage_collection,{13058,56544443,0}}, {io,{{input,8148126},{output,4265909}}}, {reductions,{24061647,607233}}, {run_queue,0}, {runtime,{2917,16}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:19:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.3742690058479532]], [['ns_1@10.2.1.101'| 0.23529411764705888]], [['ns_1@10.2.1.102'| 0.3049853372434017]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.6843.0> 2011-01-03 13:19:10
===============================================================================
vbucketmigrator<0.6843.0>: Bucket 576 moved to the next server
vbucketmigrator<0.6843.0>: Validate bucket states
vbucketmigrator<0.6843.0>: 576 ok

INFO REPORT <0.6862.0> 2011-01-03 13:19:11
===============================================================================
vbucketmigrator<0.6862.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6862.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6862.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6862.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6862.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6862.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6862.0>: Starting to move bucket 577

INFO REPORT <0.259.0> 2011-01-03 13:19:11
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
 auth_cmds 580
 auth_errors 0
 bucket_conns 2
 bytes_read 5645381528
 bytes_written 99968633
 cas_badval 0
 cas_hits 0
 cas_misses 0
 cmd_flush 0
 cmd_get 0
 cmd_set 2316286
 conn_yields 186
 connection_structures 235
 curr_connections 24
 curr_items 1984912
 curr_items_tot 3228564
 daemon_connections 10
 decr_hits 0
 decr_misses 0
 delete_hits 0
 delete_misses 0
 ep_bg_fetched 0
 ep_commit_num 3050
 ep_commit_time 0
 ep_commit_time_total 1069
 ep_data_age 321
 ep_data_age_highwat 495
 ep_db_cleaner_status complete
 ep_db_strategy multiMTDB
 ep_dbinit 81
 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default
 ep_dbshards 4
 ep_expired 0
 ep_flush_duration 152
 ep_flush_duration_highwat 297
 ep_flush_duration_total 1031
 ep_flush_preempts 0
 ep_flusher_state running
 ep_flusher_todo 19062
 ep_io_num_read 231395
 ep_io_num_write 2937594
 ep_io_read_bytes 327533542
 ep_io_write_bytes 4157687448
 ep_item_begin_failed 0
 ep_item_commit_failed 0
 ep_item_flush_expired 0
 ep_item_flush_failed 0
 ep_kv_size 2266834348
 ep_max_data_size 3426746368
 ep_max_txn_size 1000
 ep_mem_high_wat 2570059776
 ep_mem_low_wat 2056047820
 ep_min_data_age 0
 ep_num_active_non_resident 850822
 ep_num_eject_failures 1689334
 ep_num_eject_replicas 871116
 ep_num_expiry_pager_runs 0
 ep_num_non_resident 1859410
 ep_num_not_my_vbuckets 66604
 ep_num_pager_runs 6
 ep_num_value_ejects 1860015
 ep_oom_errors 0
 ep_overhead 39723047
 ep_pending_ops 0
 ep_pending_ops_max 0
 ep_pending_ops_max_duration 0
 ep_pending_ops_total 0
 ep_queue_age_cap 900
 ep_queue_size 273747
 ep_storage_age 318
 ep_storage_age_highwat 492
 ep_storage_type featured
 ep_store_max_concurrency 10
 ep_store_max_readers 9
 ep_store_max_readwrite 1
 ep_tap_bg_fetch_requeued 0
 ep_tap_bg_fetched 231395
 ep_tap_keepalive 0
 ep_tmp_oom_errors 0
 ep_too_old 0
 ep_too_young 0
 ep_total_cache_size 4870855348
 ep_total_del_items 0
 ep_total_enqueued 3230498
 ep_total_new_items 2936242
 ep_total_persisted 2937593
 ep_vbucket_del 512
 ep_vbucket_del_avg_walltime 118096
 ep_vbucket_del_fail 0
 ep_vbucket_del_max_walltime 140400
 ep_vbucket_del_total_walltime 60465600
 ep_version 1.6.2
 ep_warmed_up 0
 ep_warmup true
 ep_warmup_dups 0
 ep_warmup_oom 0
 ep_warmup_thread complete
 ep_warmup_time 31200
 get_hits 0
 get_misses 0
 incr_hits 0
 incr_misses 0
 libevent 2.0.7-rc
 limit_maxbytes 67108864
 mem_used 2306557395
 pid 2160
 pointer_size 64
 rejected_conns 0
 tap_connect_received 579
 tap_mutation_received 1574844
 tap_mutation_sent 2564510
 tap_opaque_received 513
 tap_opaque_sent 1091
 tap_vbucket_set_sent 1155
 threads 4
 time 1294089551
 total_connections 974
 uptime 1565
 version 1.4.4_304_g7d5a132

INFO REPORT <11993.4692.0> 2011-01-03 13:19:13
===============================================================================
vbucketmigrator<0.4692.0>: Bucket 39 moved to the next server
vbucketmigrator<0.4692.0>: Validate bucket states
vbucketmigrator<0.4692.0>: 39 ok

INFO REPORT <11993.4732.0> 2011-01-03 13:19:14
===============================================================================
vbucketmigrator<0.4732.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4732.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4732.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4732.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4732.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4732.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4732.0>: Starting to move bucket 40

INFO REPORT <0.6862.0> 2011-01-03 13:19:16
===============================================================================
vbucketmigrator<0.6862.0>: Bucket 577 moved to the next server
vbucketmigrator<0.6862.0>: Validate bucket states
vbucketmigrator<0.6862.0>: 577 ok

INFO REPORT <0.6874.0> 2011-01-03 13:19:17
===============================================================================
vbucketmigrator<0.6874.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6874.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6874.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6874.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6874.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6874.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6874.0>: Starting to move bucket 578

INFO REPORT <0.110.0> 2011-01-03 13:19:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.38596491228070173]], [['ns_1@10.2.1.101'| 0.24117647058823533]], [['ns_1@10.2.1.102'| 0.3137829912023461]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.6874.0> 2011-01-03 13:19:21
===============================================================================
vbucketmigrator<0.6874.0>: Bucket 578 moved to the next server
vbucketmigrator<0.6874.0>: Validate bucket states
vbucketmigrator<0.6874.0>: 578 ok

INFO REPORT <0.6892.0> 2011-01-03 13:19:22
===============================================================================
vbucketmigrator<0.6892.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6892.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6892.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6892.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6892.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6892.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6892.0>: Starting to move bucket 579

INFO REPORT <0.6892.0> 2011-01-03 13:19:25
===============================================================================
vbucketmigrator<0.6892.0>: Bucket 579 moved to the next server
vbucketmigrator<0.6892.0>: Validate bucket states
vbucketmigrator<0.6892.0>: 579 ok

INFO REPORT <0.6902.0> 2011-01-03 13:19:26
===============================================================================
vbucketmigrator<0.6902.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6902.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6902.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6902.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6902.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6902.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6902.0>: Starting to move bucket 580

INFO REPORT <11993.4732.0> 2011-01-03 13:19:27
===============================================================================
vbucketmigrator<0.4732.0>: Bucket 40 moved to the next server
vbucketmigrator<0.4732.0>: Validate bucket states
vbucketmigrator<0.4732.0>: 40 ok

INFO REPORT <0.110.0> 2011-01-03 13:19:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.4035087719298246]], [['ns_1@10.2.1.101'| 0.24117647058823533]], [['ns_1@10.2.1.102'| 0.32258064516129037]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.6902.0> 2011-01-03 13:19:28
===============================================================================
vbucketmigrator<0.6902.0>: Bucket 580 moved to the next server
vbucketmigrator<0.6902.0>: Validate bucket states
vbucketmigrator<0.6902.0>: 580 ok

INFO REPORT <11993.4766.0> 2011-01-03 13:19:28
===============================================================================
vbucketmigrator<0.4766.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4766.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4766.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4766.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4766.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4766.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4766.0>: Starting to move bucket 41

INFO REPORT <0.6920.0> 2011-01-03 13:19:29
===============================================================================
vbucketmigrator<0.6920.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6920.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6920.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6920.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6920.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6920.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6920.0>: Starting to move bucket 581

INFO REPORT <0.6920.0> 2011-01-03 13:19:33
===============================================================================
vbucketmigrator<0.6920.0>: Bucket 581 moved to the next server
vbucketmigrator<0.6920.0>: Validate bucket states
vbucketmigrator<0.6920.0>: 581 ok

INFO REPORT <0.6949.0> 2011-01-03 13:19:34
===============================================================================
vbucketmigrator<0.6949.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6949.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6949.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6949.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6949.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6949.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6949.0>: Starting to move bucket 582

INFO REPORT <11993.4766.0> 2011-01-03 13:19:37
===============================================================================
vbucketmigrator<0.4766.0>: Bucket 41 moved to the next server
vbucketmigrator<0.4766.0>: Validate bucket states
vbucketmigrator<0.4766.0>: 41 ok

INFO REPORT <0.85.0> 2011-01-03 13:19:38
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.110.0> 2011-01-03 13:19:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.41520467836257313]], [['ns_1@10.2.1.101'| 0.24705882352941178]], [['ns_1@10.2.1.102'| 0.33137829912023464]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.4791.0> 2011-01-03 13:19:38
===============================================================================
vbucketmigrator<0.4791.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4791.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4791.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4791.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4791.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4791.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4791.0>: Starting to move bucket 42

INFO REPORT <0.6949.0> 2011-01-03 13:19:39
===============================================================================
vbucketmigrator<0.6949.0>: Bucket 582 moved to the next server
vbucketmigrator<0.6949.0>: Validate bucket states
vbucketmigrator<0.6949.0>: 582 ok

INFO REPORT <0.6968.0> 2011-01-03 13:19:40
===============================================================================
vbucketmigrator<0.6968.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6968.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6968.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6968.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6968.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6968.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6968.0>: Starting to move bucket 583

INFO REPORT <11993.4791.0> 2011-01-03 13:19:42
===============================================================================
vbucketmigrator<0.4791.0>: Bucket 42 moved to the next server
vbucketmigrator<0.4791.0>: Validate bucket states
vbucketmigrator<0.4791.0>: 42 ok

INFO REPORT <11993.4806.0> 2011-01-03 13:19:43
===============================================================================
vbucketmigrator<0.4806.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4806.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4806.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4806.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4806.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4806.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4806.0>: Starting to move bucket 43

INFO REPORT <0.6968.0> 2011-01-03 13:19:45
===============================================================================
vbucketmigrator<0.6968.0>: Bucket 583 moved to the next server
vbucketmigrator<0.6968.0>: Validate bucket states
vbucketmigrator<0.6968.0>: 583 ok

INFO REPORT <0.6980.0> 2011-01-03 13:19:46
===============================================================================
vbucketmigrator<0.6980.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6980.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6980.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6980.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6980.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6980.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6980.0>: Starting to move bucket 584

INFO REPORT <11993.4806.0> 2011-01-03 13:19:48
===============================================================================
vbucketmigrator<0.4806.0>: Bucket 43 moved to the next server
vbucketmigrator<0.4806.0>: Validate bucket states
vbucketmigrator<0.4806.0>: 43 ok

INFO REPORT <0.110.0> 2011-01-03 13:19:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.42690058479532167]], [['ns_1@10.2.1.101'| 0.2588235294117647]], [['ns_1@10.2.1.102'| 0.343108504398827]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.4816.0> 2011-01-03 13:19:49
===============================================================================
vbucketmigrator<0.4816.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4816.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4816.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4816.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4816.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4816.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4816.0>: Starting to move bucket 44

INFO REPORT <0.6980.0> 2011-01-03 13:19:50
===============================================================================
vbucketmigrator<0.6980.0>: Bucket 584 moved to the next server
vbucketmigrator<0.6980.0>: Validate bucket states
vbucketmigrator<0.6980.0>: 584 ok

INFO REPORT <0.6997.0> 2011-01-03 13:19:51
===============================================================================
vbucketmigrator<0.6997.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6997.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6997.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6997.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.6997.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6997.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.6997.0>: Starting to move bucket 585

INFO REPORT <11993.4816.0> 2011-01-03 13:19:53
===============================================================================
vbucketmigrator<0.4816.0>: Bucket 44 moved to the next server
vbucketmigrator<0.4816.0>: Validate bucket states
vbucketmigrator<0.4816.0>: 44 ok

INFO REPORT <0.6997.0> 2011-01-03 13:19:54
===============================================================================
vbucketmigrator<0.6997.0>: Bucket 585 moved to the next server
vbucketmigrator<0.6997.0>: Validate bucket states
vbucketmigrator<0.6997.0>: 585 ok

INFO REPORT <11993.4831.0> 2011-01-03 13:19:54
===============================================================================
vbucketmigrator<0.4831.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4831.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4831.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4831.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4831.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4831.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4831.0>: Starting to move bucket 45

INFO REPORT <0.7006.0> 2011-01-03 13:19:55
===============================================================================
vbucketmigrator<0.7006.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7006.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7006.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7006.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7006.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7006.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7006.0>: Starting to move bucket 586

INFO REPORT <0.6931.0> 2011-01-03 13:19:57
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.6940.0> 2011-01-03 13:19:57
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.110.0> 2011-01-03 13:19:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.4327485380116959]], [['ns_1@10.2.1.101'| 0.2705882352941177]], [['ns_1@10.2.1.102'| 0.3519061583577713]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.7006.0> 2011-01-03 13:19:58
===============================================================================
vbucketmigrator<0.7006.0>: Bucket 586 moved to the next server
vbucketmigrator<0.7006.0>: Validate bucket states
vbucketmigrator<0.7006.0>: 586 ok

INFO REPORT <11993.4831.0> 2011-01-03 13:19:58
===============================================================================
vbucketmigrator<0.4831.0>: Bucket 45 moved to the next server
vbucketmigrator<0.4831.0>: Validate bucket states
vbucketmigrator<0.4831.0>: 45 ok

INFO REPORT <0.7019.0> 2011-01-03 13:19:59
===============================================================================
vbucketmigrator<0.7019.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7019.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7019.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7019.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7019.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7019.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7019.0>: Starting to move bucket 587

INFO REPORT <11993.4847.0> 2011-01-03 13:19:59
===============================================================================
vbucketmigrator<0.4847.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4847.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4847.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4847.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4847.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4847.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4847.0>: Starting to move bucket 46

INFO REPORT <0.7019.0> 2011-01-03 13:20:04
===============================================================================
vbucketmigrator<0.7019.0>: Bucket 587 moved to the next server
vbucketmigrator<0.7019.0>: Validate bucket states
vbucketmigrator<0.7019.0>: 587 ok

INFO REPORT <11993.4847.0> 2011-01-03 13:20:04
===============================================================================
vbucketmigrator<0.4847.0>: Bucket 46 moved to the next server
vbucketmigrator<0.4847.0>: Validate bucket states
vbucketmigrator<0.4847.0>: 46 ok

INFO REPORT <0.7033.0> 2011-01-03 13:20:05
===============================================================================
vbucketmigrator<0.7033.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7033.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7033.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7033.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7033.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7033.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7033.0>: Starting to move bucket 588

INFO REPORT <11993.4859.0> 2011-01-03 13:20:05
===============================================================================
vbucketmigrator<0.4859.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4859.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4859.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4859.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4859.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4859.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4859.0>: Starting to move bucket 47

INFO REPORT <0.93.0> 2011-01-03 13:20:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100', [{last_heard,{1294,89607,399401}}, {active_buckets,["default"]}, {memory, [{total,17751520}, {processes,9827572}, {processes_used,9810908}, {system,7923948}, {atom,560301}, {atom_used,557531}, {binary,333640}, {code,4570913}, {ets,1038788}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1619}, {memory_data,{4284698624,4192673792,{<0.299.0>,1457152}}}, {disk_data, [{"C:\\",48162864,58},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,85721088}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1610405,0}}, {context_switches,{506682,0}}, {garbage_collection,{96182,918606370,0}}, {io,{{input,39312584},{output,21635846}}}, {reductions,{447805946,2235426}}, {run_queue,0}, {runtime,{22027,203}}]}]},
 {'ns_1@10.2.1.101', [{last_heard,{1294,89608,366400}}, {active_buckets,["default"]}, {memory, [{total,17017496}, {processes,9066492}, {processes_used,9057812}, {system,7951004}, {atom,559813}, {atom_used,556363}, {binary,390952}, {code,4551541}, {ets,1035180}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1550}, {memory_data,{4284698624,4201537536,{<11993.4451.0>,3328652}}}, {disk_data, [{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,47169536}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1540967,78}}, {context_switches,{256801,0}}, {garbage_collection,{67106,553808373,0}}, {io,{{input,36950479},{output,17337655}}}, {reductions,{158240280,1253704}}, {run_queue,0}, {runtime,{11996,0}}]}]},
 {'ns_1@10.2.1.102', [{last_heard,{1294,89608,132400}}, {active_buckets,["default"]}, {memory, [{total,13123688}, {processes,5959276}, {processes_used,5945852}, {system,7164412}, {atom,541077}, {atom_used,528589}, {binary,318440}, {code,4280811}, {ets,612652}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,538}, {memory_data,{4284698624,1424371712,{<10870.218.0>,1271780}}}, {disk_data, [{"C:\\",49423972,38},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,2727419904}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{530325,0}}, {context_switches,{59280,0}}, {garbage_collection,{14637,64919107,0}}, {io,{{input,8414036},{output,4534655}}}, {reductions,{27607618,567044}}, {run_queue,0}, {runtime,{3104,16}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:20:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.45029239766081874]], [['ns_1@10.2.1.101'| 0.27647058823529413]], [['ns_1@10.2.1.102'| 0.36363636363636365]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.4859.0> 2011-01-03 13:20:09
===============================================================================
vbucketmigrator<0.4859.0>: Bucket 47 moved to the next server
vbucketmigrator<0.4859.0>: Validate bucket states
vbucketmigrator<0.4859.0>: 47 ok

INFO REPORT <0.7033.0> 2011-01-03 13:20:10
===============================================================================
vbucketmigrator<0.7033.0>: Bucket 588 moved to the next server
vbucketmigrator<0.7033.0>: Validate bucket states
vbucketmigrator<0.7033.0>: 588 ok

INFO REPORT <11993.4872.0> 2011-01-03 13:20:10
===============================================================================
vbucketmigrator<0.4872.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4872.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4872.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4872.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4872.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4872.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4872.0>: Starting to move bucket 48

INFO REPORT <0.7067.0> 2011-01-03 13:20:11
===============================================================================
vbucketmigrator<0.7067.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7067.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7067.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7067.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7067.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7067.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7067.0>: Starting to move bucket 589

INFO REPORT <11993.4872.0> 2011-01-03 13:20:15
===============================================================================
vbucketmigrator<0.4872.0>: Bucket 48 moved to the next server
vbucketmigrator<0.4872.0>: Validate bucket states
vbucketmigrator<0.4872.0>: 48 ok

INFO REPORT <0.7067.0> 2011-01-03 13:20:16
===============================================================================
vbucketmigrator<0.7067.0>: Bucket 589 moved to the next server
vbucketmigrator<0.7067.0>: Validate bucket states
vbucketmigrator<0.7067.0>: 589 ok

INFO REPORT <11993.4883.0> 2011-01-03 13:20:16
===============================================================================
vbucketmigrator<0.4883.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4883.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4883.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4883.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4883.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4883.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4883.0>: Starting to move bucket 49

INFO REPORT <0.7079.0> 2011-01-03 13:20:17
===============================================================================
vbucketmigrator<0.7079.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7079.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7079.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7079.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7079.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7079.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7079.0>: Starting to move bucket 590

INFO REPORT <0.110.0> 2011-01-03 13:20:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.45614035087719296]], [['ns_1@10.2.1.101'| 0.2941176470588235]], [['ns_1@10.2.1.102'| 0.375366568914956]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.4883.0> 2011-01-03 13:20:21
===============================================================================
vbucketmigrator<0.4883.0>: Bucket 49 moved to the next server
vbucketmigrator<0.4883.0>: Validate bucket states
vbucketmigrator<0.4883.0>: 49 ok

INFO REPORT <0.7079.0> 2011-01-03 13:20:21
===============================================================================
vbucketmigrator<0.7079.0>: Bucket 590 moved to the next server
vbucketmigrator<0.7079.0>: Validate bucket states
vbucketmigrator<0.7079.0>: 590 ok

INFO REPORT <11993.4900.0> 2011-01-03 13:20:22
===============================================================================
vbucketmigrator<0.4900.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4900.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4900.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4900.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4900.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4900.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4900.0>: Starting to move bucket 50

INFO REPORT <0.7092.0> 2011-01-03 13:20:22
===============================================================================
vbucketmigrator<0.7092.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7092.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7092.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7092.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7092.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7092.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7092.0>: Starting to move bucket 591

INFO REPORT <0.7092.0> 2011-01-03 13:20:26
===============================================================================
vbucketmigrator<0.7092.0>: Bucket 591 moved to the next server
vbucketmigrator<0.7092.0>: Validate bucket states
vbucketmigrator<0.7092.0>: 591 ok

INFO REPORT <11993.4900.0> 2011-01-03 13:20:26
===============================================================================
vbucketmigrator<0.4900.0>: Bucket 50 moved to the next server
vbucketmigrator<0.4900.0>: Validate bucket states
vbucketmigrator<0.4900.0>: 50 ok

INFO REPORT <0.7103.0> 2011-01-03 13:20:27
===============================================================================
vbucketmigrator<0.7103.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7103.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7103.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7103.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7103.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7103.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7103.0>: Starting to move bucket 592

INFO REPORT <0.85.0> 2011-01-03 13:20:27
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <11993.4913.0> 2011-01-03 13:20:27
===============================================================================
vbucketmigrator<0.4913.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4913.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4913.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4913.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4913.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4913.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4913.0>: Starting to move bucket 51

INFO REPORT <0.110.0> 2011-01-03 13:20:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.4736842105263158]], [['ns_1@10.2.1.101'| 0.30000000000000004]], [['ns_1@10.2.1.102'| 0.3870967741935484]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.7103.0> 2011-01-03 13:20:30
===============================================================================
vbucketmigrator<0.7103.0>: Bucket 592 moved to the next server
vbucketmigrator<0.7103.0>: Validate bucket states
vbucketmigrator<0.7103.0>: 592 ok

INFO REPORT <0.7116.0> 2011-01-03 13:20:31
===============================================================================
vbucketmigrator<0.7116.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7116.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7116.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7116.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7116.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7116.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7116.0>: Starting to move bucket 593

INFO REPORT <11993.4913.0> 2011-01-03 13:20:32
===============================================================================
vbucketmigrator<0.4913.0>: Bucket 51 moved to the next server
vbucketmigrator<0.4913.0>: Validate bucket states
vbucketmigrator<0.4913.0>: 51 ok

INFO REPORT <11993.4927.0> 2011-01-03 13:20:33
===============================================================================
vbucketmigrator<0.4927.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4927.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4927.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4927.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4927.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4927.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4927.0>: Starting to move bucket 52

INFO REPORT <0.7116.0> 2011-01-03 13:20:35
===============================================================================
vbucketmigrator<0.7116.0>: Bucket 593 moved to the next server
vbucketmigrator<0.7116.0>: Validate bucket states
vbucketmigrator<0.7116.0>: 593 ok

INFO REPORT <0.7128.0> 2011-01-03 13:20:36
===============================================================================
vbucketmigrator<0.7128.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7128.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7128.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7128.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7128.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7128.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7128.0>: Starting to move bucket 594

INFO REPORT <11993.4927.0> 2011-01-03 13:20:37
===============================================================================
vbucketmigrator<0.4927.0>: Bucket 52 moved to the next server
vbucketmigrator<0.4927.0>: Validate bucket states
vbucketmigrator<0.4927.0>: 52 ok

INFO REPORT <11993.4939.0> 2011-01-03 13:20:38
===============================================================================
vbucketmigrator<0.4939.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4939.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4939.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4939.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4939.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4939.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4939.0>: Starting to move bucket 53

INFO REPORT <0.110.0> 2011-01-03 13:20:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.48538011695906436]], [['ns_1@10.2.1.101'| 0.31176470588235294]], [['ns_1@10.2.1.102'| 0.39882697947214074]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.7128.0> 2011-01-03 13:20:40
===============================================================================
vbucketmigrator<0.7128.0>: Bucket 594 moved to the next server
vbucketmigrator<0.7128.0>: Validate bucket states
vbucketmigrator<0.7128.0>: 594 ok

INFO REPORT <0.7142.0> 2011-01-03 13:20:41
===============================================================================
vbucketmigrator<0.7142.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7142.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7142.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7142.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7142.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7142.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7142.0>: Starting to move bucket 595

INFO REPORT <11993.4939.0> 2011-01-03 13:20:43
===============================================================================
vbucketmigrator<0.4939.0>: Bucket 53 moved to the next server
vbucketmigrator<0.4939.0>: Validate bucket states
vbucketmigrator<0.4939.0>: 53 ok

INFO REPORT <0.7142.0> 2011-01-03 13:20:44
===============================================================================
vbucketmigrator<0.7142.0>: Bucket 595 moved to the next server
vbucketmigrator<0.7142.0>: Validate bucket states
vbucketmigrator<0.7142.0>: 595 ok

INFO REPORT <11993.4955.0> 2011-01-03 13:20:44
===============================================================================
vbucketmigrator<0.4955.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4955.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4955.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4955.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4955.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4955.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4955.0>: Starting to move bucket 54

INFO REPORT <0.7150.0> 2011-01-03 13:20:45
===============================================================================
vbucketmigrator<0.7150.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7150.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7150.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7150.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7150.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7150.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7150.0>: Starting to move bucket 596

INFO REPORT <0.110.0> 2011-01-03 13:20:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.49122807017543857]], [['ns_1@10.2.1.101'| 0.32352941176470584]], [['ns_1@10.2.1.102'| 0.407624633431085]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.7150.0> 2011-01-03 13:20:49
===============================================================================
vbucketmigrator<0.7150.0>: Bucket 596 moved to the next server
vbucketmigrator<0.7150.0>: Validate bucket states
vbucketmigrator<0.7150.0>: 596 ok

INFO REPORT <11993.4955.0> 2011-01-03 13:20:50
===============================================================================
vbucketmigrator<0.4955.0>: Bucket 54 moved to the next server
vbucketmigrator<0.4955.0>: Validate bucket states
vbucketmigrator<0.4955.0>: 54 ok

INFO REPORT <0.7176.0> 2011-01-03 13:20:50
===============================================================================
vbucketmigrator<0.7176.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7176.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7176.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7176.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7176.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7176.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7176.0>: Starting to move bucket 597

INFO REPORT <11993.4973.0> 2011-01-03 13:20:51
===============================================================================
vbucketmigrator<0.4973.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4973.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4973.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4973.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4973.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4973.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4973.0>: Starting to move bucket 55

INFO REPORT <0.259.0> 2011-01-03 13:20:51
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
 auth_cmds 600
 auth_errors 0
 bucket_conns 12
 bytes_read 5702899968
 bytes_written 102633203
 cas_badval 0
 cas_hits 0
 cas_misses 0
 cmd_flush 0
 cmd_get 0
 cmd_set 2363338
 conn_yields 186
 connection_structures 235
 curr_connections 34
 curr_items 1927564
 curr_items_tot 3261226
 daemon_connections 10
 decr_hits 0
 decr_misses 0
 delete_hits 0
 delete_misses 0
 ep_bg_fetched 0
 ep_commit_num 3171
 ep_commit_time 0
 ep_commit_time_total 1121
 ep_data_age 301
 ep_data_age_highwat 495
 ep_db_cleaner_status complete
 ep_db_strategy multiMTDB
 ep_dbinit 81
 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default
 ep_dbshards 4
 ep_expired 0
 ep_flush_duration 254
 ep_flush_duration_highwat 297
 ep_flush_duration_total 1285
 ep_flush_preempts 0
 ep_flusher_state running
 ep_flusher_todo 173022
 ep_io_num_read 313621
 ep_io_num_write 3057383
 ep_io_read_bytes 443932371
 ep_io_write_bytes 4327366926
 ep_item_begin_failed 0
 ep_item_commit_failed 0
 ep_item_flush_expired 0
 ep_item_flush_failed 0
 ep_kv_size 2293421198
 ep_max_data_size 3426746368
 ep_max_txn_size 1000
 ep_mem_high_wat 2570059776
 ep_mem_low_wat 2056047820
 ep_min_data_age 0
 ep_num_active_non_resident 812019
 ep_num_eject_failures 1689334
 ep_num_eject_replicas 871116
 ep_num_expiry_pager_runs 0
 ep_num_non_resident 1870536
 ep_num_not_my_vbuckets 80925
 ep_num_pager_runs 6
 ep_num_value_ejects 1871175
 ep_oom_errors 0
 ep_overhead 35501603
 ep_pending_ops 0
 ep_pending_ops_max 0
 ep_pending_ops_max_duration 0
 ep_pending_ops_total 0
 ep_queue_age_cap 900
 ep_queue_size 32728
 ep_storage_age 299
 ep_storage_age_highwat 492
 ep_storage_type featured
 ep_store_max_concurrency 10
 ep_store_max_readers 9
 ep_store_max_readwrite 1
 ep_tap_bg_fetch_requeued 0
 ep_tap_bg_fetched 313621
 ep_tap_keepalive 0
 ep_tmp_oom_errors 0
 ep_too_old 0
 ep_too_young 0
 ep_total_cache_size 4913066198
 ep_total_del_items 0
 ep_total_enqueued 3263229
 ep_total_new_items 3055832
 ep_total_persisted 3057382
 ep_vbucket_del 512
 ep_vbucket_del_avg_walltime 118096
 ep_vbucket_del_fail 0
 ep_vbucket_del_max_walltime 140400
 ep_vbucket_del_total_walltime 60465600
 ep_version 1.6.2
 ep_warmed_up 0
 ep_warmup true
 ep_warmup_dups 0
 ep_warmup_oom 0
 ep_warmup_thread complete
 ep_warmup_time 31200
 get_hits 0
 get_misses 0
 incr_hits 0
 incr_misses 0
 libevent 2.0.7-rc
 limit_maxbytes 67108864
 mem_used 2328922801
 pid 2160
 pointer_size 64
 rejected_conns 0
 tap_connect_received 599
 tap_mutation_received 1574844
 tap_mutation_sent 2698065
 tap_opaque_received 513
 tap_opaque_sent 1111
 tap_vbucket_set_sent 1195
 threads 4
 time 1294089651
 total_connections 1024
 uptime 1665
 version 1.4.4_304_g7d5a132

INFO REPORT <0.7176.0> 2011-01-03 13:20:53
===============================================================================
vbucketmigrator<0.7176.0>: Bucket 597 moved to the next server
vbucketmigrator<0.7176.0>: Validate bucket states
vbucketmigrator<0.7176.0>: 597 ok

INFO REPORT <0.7197.0> 2011-01-03 13:20:54
===============================================================================
vbucketmigrator<0.7197.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7197.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7197.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7197.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7197.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7197.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7197.0>: Starting to move bucket 598

INFO REPORT <11993.4973.0> 2011-01-03 13:20:56
===============================================================================
vbucketmigrator<0.4973.0>: Bucket 55 moved to the next server
vbucketmigrator<0.4973.0>: Validate bucket states
vbucketmigrator<0.4973.0>: 55 ok

INFO REPORT <11993.4991.0> 2011-01-03 13:20:57
===============================================================================
vbucketmigrator<0.4991.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.4991.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4991.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.4991.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.4991.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4991.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.4991.0>: Starting to move bucket 56

INFO REPORT <0.110.0> 2011-01-03 13:20:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.5087719298245614]], [['ns_1@10.2.1.101'| 0.3294117647058824]], [['ns_1@10.2.1.102'| 0.4193548387096774]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.7197.0> 2011-01-03 13:20:58
===============================================================================
vbucketmigrator<0.7197.0>: Bucket 598 moved to the next server
vbucketmigrator<0.7197.0>: Validate bucket states
vbucketmigrator<0.7197.0>: 598 ok

INFO REPORT <0.7221.0> 2011-01-03 13:20:59
===============================================================================
vbucketmigrator<0.7221.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7221.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7221.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7221.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7221.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7221.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7221.0>: Starting to move bucket 599

INFO REPORT <0.85.0> 2011-01-03 13:21:02
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <11993.4991.0> 2011-01-03 13:21:02
===============================================================================
vbucketmigrator<0.4991.0>: Bucket 56 moved to the next server
vbucketmigrator<0.4991.0>: Validate bucket states
vbucketmigrator<0.4991.0>: 56 ok

INFO REPORT <0.7221.0> 2011-01-03 13:21:03
===============================================================================
vbucketmigrator<0.7221.0>: Bucket 599 moved to the next server
vbucketmigrator<0.7221.0>: Validate bucket states
vbucketmigrator<0.7221.0>: 599 ok

INFO REPORT <11993.5013.0> 2011-01-03 13:21:03
===============================================================================
vbucketmigrator<0.5013.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5013.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5013.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5013.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5013.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5013.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5013.0>: Starting to move bucket 57

INFO REPORT <0.7276.0> 2011-01-03 13:21:04
===============================================================================
vbucketmigrator<0.7276.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7276.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7276.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7276.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7276.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7276.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7276.0>: Starting to move bucket 600

INFO REPORT <0.7276.0> 2011-01-03 13:21:07
===============================================================================
vbucketmigrator<0.7276.0>: Bucket 600 moved to the next server
vbucketmigrator<0.7276.0>: Validate bucket states
vbucketmigrator<0.7276.0>: 600 ok

INFO REPORT <0.93.0> 2011-01-03 13:21:08
=============================================================================== ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,89667,413401}}, {active_buckets,["default"]}, {memory, [{total,28338616}, {processes,20464692}, {processes_used,20453420}, {system,7873924}, {atom,560301}, {atom_used,557531}, {binary,243048}, {code,4570913}, {ets,1075684}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1679}, {memory_data,{4284698624,4202479616,{<0.299.0>,1457152}}}, {disk_data, [{"C:\\",48162864,58},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,81285120}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1670403,0}}, {context_switches,{523287,0}}, {garbage_collection,{99512,957423000,0}}, {io,{{input,41367972},{output,22965504}}}, {reductions,{459290919,3844132}}, {run_queue,0}, {runtime,{22978,358}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,89668,380402}}, {active_buckets,["default"]}, {memory, [{total,16009496}, {processes,8062284}, {processes_used,8054092}, {system,7947212}, {atom,559813}, {atom_used,556363}, {binary,352888}, {code,4551541}, {ets,1069124}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1610}, {memory_data,{4284698624,4249063424,{<11993.4665.0>,3328652}}}, {disk_data, [{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,63553536}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1600903,0}}, {context_switches,{268481,0}}, {garbage_collection,{69411,580221119,0}}, {io,{{input,37810984},{output,17767042}}}, {reductions,{166676681,1147945}}, {run_queue,0}, {runtime,{12292,31}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,89668,146400}}, {active_buckets,["default"]}, {memory, [{total,12523664}, {processes,5329028}, {processes_used,5315604}, {system,7194636}, {atom,541077}, {atom_used,528589}, {binary,313016}, {code,4280811}, {ets,648516}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,598}, {memory_data,{4284698624,1580326912,{<10870.218.0>,1086308}}}, {disk_data, [{"C:\\",49423972,38},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,2563125248}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{590323,0}}, {context_switches,{65264,0}}, {garbage_collection,{16375,73515012,0}}, {io,{{input,9444338},{output,5535348}}}, {reductions,{31211989,597384}}, {run_queue,0}, {runtime,{3354,63}}]}]}] INFO REPORT <0.7330.0> 2011-01-03 13:21:08 =============================================================================== vbucketmigrator<0.7330.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7330.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7330.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7330.0>: Connecting to {Sock 
10.2.1.100:11210} vbucketmigrator<0.7330.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7330.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7330.0>: Starting to move bucket 601 INFO REPORT <0.110.0> 2011-01-03 13:21:08 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.52046783625731]], [['ns_1@10.2.1.101'| 0.3411764705882353]], [['ns_1@10.2.1.102'| 0.43108504398826974]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <11993.5013.0> 2011-01-03 13:21:09 =============================================================================== vbucketmigrator<0.5013.0>: Bucket 57 moved to the next server vbucketmigrator<0.5013.0>: Validate bucket states vbucketmigrator<0.5013.0>: 57 ok INFO REPORT <11993.5041.0> 2011-01-03 13:21:10 =============================================================================== vbucketmigrator<0.5041.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5041.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5041.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5041.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5041.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5041.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5041.0>: Starting to move bucket 58 INFO REPORT <0.7330.0> 2011-01-03 13:21:12 =============================================================================== vbucketmigrator<0.7330.0>: Bucket 601 moved to the next server vbucketmigrator<0.7330.0>: Validate bucket states vbucketmigrator<0.7330.0>: 601 ok INFO REPORT <0.7357.0> 2011-01-03 13:21:13 =============================================================================== vbucketmigrator<0.7357.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7357.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7357.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7357.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7357.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7357.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7357.0>: Starting to move bucket 602 INFO REPORT <0.7357.0> 2011-01-03 13:21:16 =============================================================================== vbucketmigrator<0.7357.0>: Bucket 602 moved to the next server vbucketmigrator<0.7357.0>: Validate bucket states vbucketmigrator<0.7357.0>: 602 ok INFO REPORT <0.7430.0> 2011-01-03 13:21:17 =============================================================================== vbucketmigrator<0.7430.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7430.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7430.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7430.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7430.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7430.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7430.0>: Starting to move bucket 603 INFO REPORT <0.110.0> 2011-01-03 13:21:18 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state 
rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.5321637426900585]], [['ns_1@10.2.1.101'| 0.34705882352941175]], [['ns_1@10.2.1.102'| 0.43988269794721413]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <11993.5041.0> 2011-01-03 13:21:19 =============================================================================== vbucketmigrator<0.5041.0>: Bucket 58 moved to the next server vbucketmigrator<0.5041.0>: Validate bucket states vbucketmigrator<0.5041.0>: 58 ok INFO REPORT <11993.5082.0> 2011-01-03 13:21:21 =============================================================================== vbucketmigrator<0.5082.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5082.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5082.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5082.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5082.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5082.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5082.0>: Starting to move bucket 59 INFO REPORT <0.7430.0> 2011-01-03 13:21:21 =============================================================================== vbucketmigrator<0.7430.0>: Bucket 603 moved to the next server vbucketmigrator<0.7430.0>: Validate bucket states vbucketmigrator<0.7430.0>: 603 ok INFO REPORT <0.7446.0> 2011-01-03 13:21:22 =============================================================================== vbucketmigrator<0.7446.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7446.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7446.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7446.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7446.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7446.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7446.0>: Starting to move bucket 604 INFO REPORT <0.7446.0> 2011-01-03 13:21:27 =============================================================================== vbucketmigrator<0.7446.0>: Bucket 604 moved to the next server vbucketmigrator<0.7446.0>: Validate bucket states vbucketmigrator<0.7446.0>: 604 ok INFO REPORT <0.110.0> 2011-01-03 13:21:28 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.543859649122807]], [['ns_1@10.2.1.101'| 0.3529411764705882]], [['ns_1@10.2.1.102'| 0.4486803519061584]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.7465.0> 2011-01-03 13:21:28 =============================================================================== vbucketmigrator<0.7465.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7465.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7465.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7465.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7465.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7465.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7465.0>: Starting to move bucket 605 INFO REPORT <11993.5082.0> 2011-01-03 13:21:29 
=============================================================================== vbucketmigrator<0.5082.0>: Bucket 59 moved to the next server vbucketmigrator<0.5082.0>: Validate bucket states vbucketmigrator<0.5082.0>: 59 ok INFO REPORT <11993.5102.0> 2011-01-03 13:21:31 =============================================================================== vbucketmigrator<0.5102.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5102.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5102.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5102.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5102.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5102.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5102.0>: Starting to move bucket 60 INFO REPORT <0.7465.0> 2011-01-03 13:21:32 =============================================================================== vbucketmigrator<0.7465.0>: Bucket 605 moved to the next server vbucketmigrator<0.7465.0>: Validate bucket states vbucketmigrator<0.7465.0>: 605 ok INFO REPORT <0.7484.0> 2011-01-03 13:21:33 =============================================================================== vbucketmigrator<0.7484.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7484.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7484.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7484.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7484.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7484.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7484.0>: Starting to move bucket 606 INFO REPORT <0.7484.0> 2011-01-03 13:21:36 =============================================================================== vbucketmigrator<0.7484.0>: Bucket 606 moved to the next server vbucketmigrator<0.7484.0>: Validate bucket states vbucketmigrator<0.7484.0>: 606 ok INFO REPORT <0.7501.0> 2011-01-03 13:21:37 =============================================================================== vbucketmigrator<0.7501.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7501.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7501.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7501.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7501.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7501.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7501.0>: Starting to move bucket 607 INFO REPORT <0.110.0> 2011-01-03 13:21:38 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.5555555555555556]], [['ns_1@10.2.1.101'| 0.35882352941176465]], [['ns_1@10.2.1.102'| 0.4574780058651027]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.85.0> 2011-01-03 13:21:41 =============================================================================== Pulling config from: 'ns_1@10.2.1.102' INFO REPORT <11993.5102.0> 2011-01-03 13:21:41 =============================================================================== vbucketmigrator<0.5102.0>: Bucket 60 moved to the next server vbucketmigrator<0.5102.0>: Validate bucket states vbucketmigrator<0.5102.0>: 60 ok INFO REPORT <0.7501.0> 
2011-01-03 13:21:42 =============================================================================== vbucketmigrator<0.7501.0>: Bucket 607 moved to the next server vbucketmigrator<0.7501.0>: Validate bucket states vbucketmigrator<0.7501.0>: 607 ok INFO REPORT <0.7520.0> 2011-01-03 13:21:43 =============================================================================== vbucketmigrator<0.7520.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7520.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7520.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7520.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7520.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7520.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7520.0>: Starting to move bucket 608 INFO REPORT <11993.5127.0> 2011-01-03 13:21:43 =============================================================================== vbucketmigrator<0.5127.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5127.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5127.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5127.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5127.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5127.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5127.0>: Starting to move bucket 61 INFO REPORT <0.7520.0> 2011-01-03 13:21:47 =============================================================================== vbucketmigrator<0.7520.0>: Bucket 608 moved to the next server vbucketmigrator<0.7520.0>: Validate bucket states vbucketmigrator<0.7520.0>: 608 ok INFO REPORT <0.110.0> 2011-01-03 13:21:48 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.5672514619883041]], [['ns_1@10.2.1.101'| 0.3647058823529412]], [['ns_1@10.2.1.102'| 0.46627565982404695]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.7537.0> 2011-01-03 13:21:48 =============================================================================== vbucketmigrator<0.7537.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7537.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7537.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7537.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7537.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7537.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7537.0>: Starting to move bucket 609 INFO REPORT <0.85.0> 2011-01-03 13:21:49 =============================================================================== Pulling config from: 'ns_1@10.2.1.101' INFO REPORT <11993.5127.0> 2011-01-03 13:21:52 =============================================================================== vbucketmigrator<0.5127.0>: Bucket 61 moved to the next server vbucketmigrator<0.5127.0>: Validate bucket states vbucketmigrator<0.5127.0>: 61 ok INFO REPORT <0.7418.0> 2011-01-03 13:21:52 =============================================================================== menelaus_web streaming socket closed by client INFO REPORT <0.7537.0> 2011-01-03 13:21:52 
=============================================================================== vbucketmigrator<0.7537.0>: Bucket 609 moved to the next server vbucketmigrator<0.7537.0>: Validate bucket states vbucketmigrator<0.7537.0>: 609 ok INFO REPORT <0.7409.0> 2011-01-03 13:21:53 =============================================================================== menelaus_web streaming socket closed by client INFO REPORT <0.7399.0> 2011-01-03 13:21:53 =============================================================================== menelaus_web streaming socket closed by client INFO REPORT <0.7555.0> 2011-01-03 13:21:53 =============================================================================== vbucketmigrator<0.7555.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7555.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7555.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7555.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7555.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7555.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7555.0>: Starting to move bucket 610 INFO REPORT <0.7384.0> 2011-01-03 13:21:54 =============================================================================== menelaus_web streaming socket closed by client INFO REPORT <11993.5150.0> 2011-01-03 13:21:54 =============================================================================== vbucketmigrator<0.5150.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5150.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5150.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5150.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5150.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5150.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5150.0>: Starting to move bucket 62 INFO REPORT <0.7365.0> 2011-01-03 13:21:54 =============================================================================== menelaus_web streaming socket closed by client INFO REPORT <0.7350.0> 2011-01-03 13:21:55 =============================================================================== menelaus_web streaming socket closed by client INFO REPORT <0.7316.0> 2011-01-03 13:21:55 =============================================================================== menelaus_web streaming socket closed by client INFO REPORT <0.7306.0> 2011-01-03 13:21:56 =============================================================================== menelaus_web streaming socket closed by client INFO REPORT <0.7293.0> 2011-01-03 13:21:56 =============================================================================== menelaus_web streaming socket closed by client INFO REPORT <0.7283.0> 2011-01-03 13:21:56 =============================================================================== menelaus_web streaming socket closed by client INFO REPORT <0.7375.0> 2011-01-03 13:21:57 =============================================================================== menelaus_web streaming socket closed by client INFO REPORT <0.7555.0> 2011-01-03 13:21:58 =============================================================================== vbucketmigrator<0.7555.0>: Bucket 610 moved to the next server vbucketmigrator<0.7555.0>: Validate bucket states vbucketmigrator<0.7555.0>: 610 ok INFO REPORT <0.110.0> 2011-01-03 13:21:58 =============================================================================== 
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.5789473684210527]], [['ns_1@10.2.1.101'| 0.37058823529411766]], [['ns_1@10.2.1.102'| 0.4750733137829912]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.7566.0> 2011-01-03 13:21:59 =============================================================================== vbucketmigrator<0.7566.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7566.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7566.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7566.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7566.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7566.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7566.0>: Starting to move bucket 611 INFO REPORT <0.7566.0> 2011-01-03 13:22:03 =============================================================================== vbucketmigrator<0.7566.0>: Bucket 611 moved to the next server vbucketmigrator<0.7566.0>: Validate bucket states vbucketmigrator<0.7566.0>: 611 ok INFO REPORT <0.7584.0> 2011-01-03 13:22:04 =============================================================================== vbucketmigrator<0.7584.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7584.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7584.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7584.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7584.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7584.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7584.0>: Starting to move bucket 612 INFO REPORT <0.7584.0> 2011-01-03 13:22:07 =============================================================================== vbucketmigrator<0.7584.0>: Bucket 612 moved to the next server vbucketmigrator<0.7584.0>: Validate bucket states vbucketmigrator<0.7584.0>: 612 ok INFO REPORT <0.93.0> 2011-01-03 13:22:08 =============================================================================== ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,89727,411401}}, {active_buckets,["default"]}, {memory, [{total,22837656}, {processes,15008868}, {processes_used,14994532}, {system,7828788}, {atom,560301}, {atom_used,557531}, {binary,201152}, {code,4570913}, {ets,1073988}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1739}, {memory_data,{4284698624,4217487360,{<0.6900.0>,6171272}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,113373184}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1730401,0}}, {context_switches,{547659,0}}, {garbage_collection,{103244,1023416008,0}}, {io,{{input,44257089},{output,23865002}}}, {reductions,{476505849,2866823}}, {run_queue,0}, {runtime,{23712,94}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,89728,394401}}, {active_buckets,["default"]}, {memory, [{total,18292184}, {processes,10358420}, {processes_used,10349740}, {system,7933764}, {atom,559813}, {atom_used,556363}, 
{binary,339128}, {code,4551541}, {ets,1069060}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1670}, {memory_data,{4284698624,4202835968,{<11993.4748.0>,3328652}}}, {disk_data, [{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,59305984}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1660901,0}}, {context_switches,{280835,0}}, {garbage_collection,{71999,608157037,0}}, {io,{{input,38721258},{output,18195661}}}, {reductions,{175401696,1125131}}, {run_queue,0}, {runtime,{12870,110}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,89728,144400}}, {active_buckets,["default"]}, {memory, [{total,13275512}, {processes,6073916}, {processes_used,6060492}, {system,7201596}, {atom,541077}, {atom_used,528589}, {binary,312872}, {code,4280811}, {ets,655348}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,658}, {memory_data,{4284698624,1749544960,{<10870.218.0>,1271780}}}, {disk_data, [{"C:\\",49423972,38},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,2408480768}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{650321,0}}, {context_switches,{71704,0}}, {garbage_collection,{18107,83109475,0}}, {io,{{input,9719377},{output,5818482}}}, {reductions,{34934708,571409}}, {run_queue,0}, {runtime,{3494,0}}]}]}] INFO REPORT <0.110.0> 2011-01-03 13:22:08 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.5906432748538012]], [['ns_1@10.2.1.101'| 0.37058823529411766]], [['ns_1@10.2.1.102'| 0.4809384164222874]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.7592.0> 2011-01-03 13:22:08 =============================================================================== vbucketmigrator<0.7592.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7592.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7592.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7592.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7592.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7592.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7592.0>: Starting to move bucket 613 INFO REPORT <11993.5150.0> 2011-01-03 13:22:11 =============================================================================== vbucketmigrator<0.5150.0>: Bucket 62 moved to the next server vbucketmigrator<0.5150.0>: Validate bucket states vbucketmigrator<0.5150.0>: 62 ok INFO REPORT <0.7592.0> 2011-01-03 13:22:13 =============================================================================== vbucketmigrator<0.7592.0>: Bucket 613 moved to the next server vbucketmigrator<0.7592.0>: Validate bucket states vbucketmigrator<0.7592.0>: 613 ok INFO REPORT <0.7618.0> 2011-01-03 13:22:14 
=============================================================================== vbucketmigrator<0.7618.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7618.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7618.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7618.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7618.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7618.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7618.0>: Starting to move bucket 614 INFO REPORT <11993.5210.0> 2011-01-03 13:22:14 =============================================================================== vbucketmigrator<0.5210.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5210.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5210.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5210.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5210.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5210.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5210.0>: Starting to move bucket 63 INFO REPORT <0.7618.0> 2011-01-03 13:22:18 =============================================================================== vbucketmigrator<0.7618.0>: Bucket 614 moved to the next server vbucketmigrator<0.7618.0>: Validate bucket states vbucketmigrator<0.7618.0>: 614 ok INFO REPORT <0.110.0> 2011-01-03 13:22:18 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.6023391812865497]], [['ns_1@10.2.1.101'| 0.3764705882352941]], [['ns_1@10.2.1.102'| 0.4897360703812317]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.7635.0> 2011-01-03 13:22:19 =============================================================================== vbucketmigrator<0.7635.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7635.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7635.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7635.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7635.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7635.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7635.0>: Starting to move bucket 615 INFO REPORT <0.7635.0> 2011-01-03 13:22:23 =============================================================================== vbucketmigrator<0.7635.0>: Bucket 615 moved to the next server vbucketmigrator<0.7635.0>: Validate bucket states vbucketmigrator<0.7635.0>: 615 ok INFO REPORT <0.7656.0> 2011-01-03 13:22:24 =============================================================================== vbucketmigrator<0.7656.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7656.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7656.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7656.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7656.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7656.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7656.0>: Starting to move bucket 616 INFO REPORT <11993.5210.0> 2011-01-03 13:22:24 
=============================================================================== vbucketmigrator<0.5210.0>: Bucket 63 moved to the next server vbucketmigrator<0.5210.0>: Validate bucket states vbucketmigrator<0.5210.0>: 63 ok INFO REPORT <11993.5240.0> 2011-01-03 13:22:27 =============================================================================== vbucketmigrator<0.5240.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5240.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5240.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5240.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5240.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5240.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5240.0>: Starting to move bucket 64 INFO REPORT <0.7656.0> 2011-01-03 13:22:28 =============================================================================== vbucketmigrator<0.7656.0>: Bucket 616 moved to the next server vbucketmigrator<0.7656.0>: Validate bucket states vbucketmigrator<0.7656.0>: 616 ok INFO REPORT <0.110.0> 2011-01-03 13:22:28 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.6140350877192983]], [['ns_1@10.2.1.101'| 0.38235294117647056]], [['ns_1@10.2.1.102'| 0.49853372434017595]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.7669.0> 2011-01-03 13:22:29 =============================================================================== vbucketmigrator<0.7669.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7669.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7669.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7669.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7669.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7669.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7669.0>: Starting to move bucket 617 INFO REPORT <0.259.0> 2011-01-03 13:22:31 =============================================================================== ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default": auth_cmds 620 auth_errors 0 bucket_conns 62 bytes_read 5754775572 bytes_written 136856098 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 3250265 conn_yields 186 connection_structures 235 curr_connections 84 curr_items 2377386 curr_items_tot 3815666 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 3290 ep_commit_time 0 ep_commit_time_total 1165 ep_data_age 312 ep_data_age_highwat 495 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 254 ep_flush_duration_highwat 297 ep_flush_duration_total 1285 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 56021 ep_io_num_read 398049 ep_io_num_write 3174382 ep_io_read_bytes 563451632 ep_io_write_bytes 4493046566 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 2332650606 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 772745 
ep_num_eject_failures 1689334 ep_num_eject_replicas 871116 ep_num_expiry_pager_runs 0 ep_num_non_resident 1888300 ep_num_not_my_vbuckets 412006 ep_num_pager_runs 6 ep_num_value_ejects 1889422 ep_oom_errors 0 ep_overhead 56764071 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 588286 ep_storage_age 311 ep_storage_age_highwat 492 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 398049 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 4977840014 ep_total_del_items 0 ep_total_enqueued 3818787 ep_total_new_items 3172639 ep_total_persisted 3174381 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2389414677 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 619 tap_mutation_received 1574844 tap_mutation_sent 2848216 tap_opaque_received 513 tap_opaque_sent 1131 tap_vbucket_set_sent 1235 threads 4 time 1294089750 total_connections 1204 uptime 1764 version 1.4.4_304_g7d5a132 INFO REPORT <0.7669.0> 2011-01-03 13:22:33 =============================================================================== vbucketmigrator<0.7669.0>: Bucket 617 moved to the next server vbucketmigrator<0.7669.0>: Validate bucket states vbucketmigrator<0.7669.0>: 617 ok INFO REPORT <0.7692.0> 2011-01-03 13:22:34 =============================================================================== vbucketmigrator<0.7692.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7692.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7692.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7692.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7692.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7692.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7692.0>: Starting to move bucket 618 INFO REPORT <11993.5240.0> 2011-01-03 13:22:38 =============================================================================== vbucketmigrator<0.5240.0>: Bucket 64 moved to the next server vbucketmigrator<0.5240.0>: Validate bucket states vbucketmigrator<0.5240.0>: 64 ok INFO REPORT <0.110.0> 2011-01-03 13:22:38 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.6257309941520468]], [['ns_1@10.2.1.101'| 0.38235294117647056]], [['ns_1@10.2.1.102'| 0.5043988269794721]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.7692.0> 2011-01-03 13:22:39 =============================================================================== vbucketmigrator<0.7692.0>: Bucket 618 moved to the next server vbucketmigrator<0.7692.0>: Validate bucket states vbucketmigrator<0.7692.0>: 618 ok INFO REPORT <0.7714.0> 2011-01-03 13:22:40 =============================================================================== vbucketmigrator<0.7714.0>: Connecting to {Sock 10.2.1.102:11210} 
vbucketmigrator<0.7714.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7714.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7714.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7714.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7714.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7714.0>: Starting to move bucket 619 INFO REPORT <11993.5266.0> 2011-01-03 13:22:40 =============================================================================== vbucketmigrator<0.5266.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5266.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5266.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5266.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5266.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5266.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5266.0>: Starting to move bucket 65 INFO REPORT <0.85.0> 2011-01-03 13:22:42 =============================================================================== Pulling config from: 'ns_1@10.2.1.102' INFO REPORT <0.7714.0> 2011-01-03 13:22:43 =============================================================================== vbucketmigrator<0.7714.0>: Bucket 619 moved to the next server vbucketmigrator<0.7714.0>: Validate bucket states vbucketmigrator<0.7714.0>: 619 ok INFO REPORT <0.7727.0> 2011-01-03 13:22:44 =============================================================================== vbucketmigrator<0.7727.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7727.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7727.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7727.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7727.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7727.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7727.0>: Starting to move bucket 620 INFO REPORT <0.110.0> 2011-01-03 13:22:48 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.631578947368421]], [['ns_1@10.2.1.101'| 0.388235294117647]], [['ns_1@10.2.1.102'| 0.5102639296187683]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.7727.0> 2011-01-03 13:22:49 =============================================================================== vbucketmigrator<0.7727.0>: Bucket 620 moved to the next server vbucketmigrator<0.7727.0>: Validate bucket states vbucketmigrator<0.7727.0>: 620 ok INFO REPORT <0.7746.0> 2011-01-03 13:22:50 =============================================================================== vbucketmigrator<0.7746.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7746.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7746.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7746.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7746.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7746.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7746.0>: Starting to move bucket 621 INFO REPORT <11993.5266.0> 2011-01-03 13:22:51 
=============================================================================== vbucketmigrator<0.5266.0>: Bucket 65 moved to the next server vbucketmigrator<0.5266.0>: Validate bucket states vbucketmigrator<0.5266.0>: 65 ok INFO REPORT <11993.5299.0> 2011-01-03 13:22:54 =============================================================================== vbucketmigrator<0.5299.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5299.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5299.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5299.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5299.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5299.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5299.0>: Starting to move bucket 66 INFO REPORT <0.7746.0> 2011-01-03 13:22:54 =============================================================================== vbucketmigrator<0.7746.0>: Bucket 621 moved to the next server vbucketmigrator<0.7746.0>: Validate bucket states vbucketmigrator<0.7746.0>: 621 ok INFO REPORT <0.7767.0> 2011-01-03 13:22:55 =============================================================================== vbucketmigrator<0.7767.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7767.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7767.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7767.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7767.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7767.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7767.0>: Starting to move bucket 622 INFO REPORT <0.110.0> 2011-01-03 13:22:58 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.6432748538011697]], [['ns_1@10.2.1.101'| 0.3941176470588236]], [['ns_1@10.2.1.102'| 0.5190615835777126]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.7767.0> 2011-01-03 13:22:59 =============================================================================== vbucketmigrator<0.7767.0>: Bucket 622 moved to the next server vbucketmigrator<0.7767.0>: Validate bucket states vbucketmigrator<0.7767.0>: 622 ok INFO REPORT <0.7789.0> 2011-01-03 13:23:01 =============================================================================== vbucketmigrator<0.7789.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7789.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7789.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7789.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7789.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7789.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7789.0>: Starting to move bucket 623 INFO REPORT <0.7789.0> 2011-01-03 13:23:04 =============================================================================== vbucketmigrator<0.7789.0>: Bucket 623 moved to the next server vbucketmigrator<0.7789.0>: Validate bucket states vbucketmigrator<0.7789.0>: 623 ok INFO REPORT <11993.5299.0> 2011-01-03 13:23:04 =============================================================================== vbucketmigrator<0.5299.0>: Bucket 66 moved to the next 
server vbucketmigrator<0.5299.0>: Validate bucket states vbucketmigrator<0.5299.0>: 66 ok INFO REPORT <0.7801.0> 2011-01-03 13:23:05 =============================================================================== vbucketmigrator<0.7801.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7801.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7801.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7801.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7801.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7801.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7801.0>: Starting to move bucket 624 INFO REPORT <0.85.0> 2011-01-03 13:23:05 =============================================================================== Pulling config from: 'ns_1@10.2.1.102' INFO REPORT <11993.5326.0> 2011-01-03 13:23:06 =============================================================================== vbucketmigrator<0.5326.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5326.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5326.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5326.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5326.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5326.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5326.0>: Starting to move bucket 67 INFO REPORT <0.93.0> 2011-01-03 13:23:08 =============================================================================== ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,89787,409402}}, {active_buckets,["default"]}, {memory, [{total,22997448}, {processes,15088028}, {processes_used,15073868}, {system,7909420}, {atom,560301}, {atom_used,557531}, {binary,247504}, {code,4570913}, {ets,1107996}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1799}, {memory_data,{4284698624,4181590016,{<0.299.0>,3328596}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,80920576}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1790398,0}}, {context_switches,{565534,0}}, {garbage_collection,{106246,1071131776,0}}, {io,{{input,47044833},{output,24508447}}}, {reductions,{489266299,2776576}}, {run_queue,0}, {runtime,{24242,172}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,89788,392402}}, {active_buckets,["default"]}, {memory, [{total,16665752}, {processes,8684404}, {processes_used,8675724}, {system,7981348}, {atom,559813}, {atom_used,556363}, {binary,351104}, {code,4551541}, {ets,1104676}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1730}, {memory_data,{4284698624,4252368896,{<11993.387.0>,2357452}}}, {disk_data, [{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,32522240}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1720899,0}}, {context_switches,{292886,0}}, 
{garbage_collection,{74717,635481969,0}}, {io,{{input,40396973},{output,19340948}}}, {reductions,{184022617,2049441}}, {run_queue,0}, {runtime,{13806,188}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,89788,158400}}, {active_buckets,["default"]}, {memory, [{total,12833352}, {processes,5594724}, {processes_used,5581300}, {system,7238628}, {atom,541077}, {atom_used,528589}, {binary,315864}, {code,4280811}, {ets,689380}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,718}, {memory_data,{4284698624,1895641088,{<10870.218.0>,1271780}}}, {disk_data, [{"C:\\",49423972,38},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,2286948352}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{710319,0}}, {context_switches,{77352,0}}, {garbage_collection,{19694,92055199,0}}, {io,{{input,9993553},{output,6082837}}}, {reductions,{38508217,611933}}, {run_queue,0}, {runtime,{3588,16}}]}]}] INFO REPORT <0.110.0> 2011-01-03 13:23:08 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.6608187134502924]], [['ns_1@10.2.1.101'| 0.3941176470588236]], [['ns_1@10.2.1.102'| 0.5278592375366569]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.7801.0> 2011-01-03 13:23:09 =============================================================================== vbucketmigrator<0.7801.0>: Bucket 624 moved to the next server vbucketmigrator<0.7801.0>: Validate bucket states vbucketmigrator<0.7801.0>: 624 ok INFO REPORT <0.7835.0> 2011-01-03 13:23:10 =============================================================================== vbucketmigrator<0.7835.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7835.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7835.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7835.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7835.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7835.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7835.0>: Starting to move bucket 625 INFO REPORT <0.7835.0> 2011-01-03 13:23:14 =============================================================================== vbucketmigrator<0.7835.0>: Bucket 625 moved to the next server vbucketmigrator<0.7835.0>: Validate bucket states vbucketmigrator<0.7835.0>: 625 ok INFO REPORT <0.7854.0> 2011-01-03 13:23:15 =============================================================================== vbucketmigrator<0.7854.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7854.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7854.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7854.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7854.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7854.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7854.0>: Starting to move bucket 626 INFO REPORT <11993.5326.0> 2011-01-03 13:23:17 =============================================================================== 
vbucketmigrator<0.5326.0>: Bucket 67 moved to the next server vbucketmigrator<0.5326.0>: Validate bucket states vbucketmigrator<0.5326.0>: 67 ok INFO REPORT <0.110.0> 2011-01-03 13:23:18 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.672514619883041]], [['ns_1@10.2.1.101'| 0.4]], [['ns_1@10.2.1.102'| 0.5366568914956011]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <11993.5353.0> 2011-01-03 13:23:18 =============================================================================== vbucketmigrator<0.5353.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5353.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5353.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5353.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5353.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5353.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5353.0>: Starting to move bucket 68 INFO REPORT <0.7854.0> 2011-01-03 13:23:19 =============================================================================== vbucketmigrator<0.7854.0>: Bucket 626 moved to the next server vbucketmigrator<0.7854.0>: Validate bucket states vbucketmigrator<0.7854.0>: 626 ok INFO REPORT <0.7871.0> 2011-01-03 13:23:20 =============================================================================== vbucketmigrator<0.7871.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7871.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7871.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7871.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7871.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7871.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7871.0>: Starting to move bucket 627 INFO REPORT <0.7871.0> 2011-01-03 13:23:23 =============================================================================== vbucketmigrator<0.7871.0>: Bucket 627 moved to the next server vbucketmigrator<0.7871.0>: Validate bucket states vbucketmigrator<0.7871.0>: 627 ok INFO REPORT <0.7880.0> 2011-01-03 13:23:24 =============================================================================== vbucketmigrator<0.7880.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7880.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7880.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7880.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.7880.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7880.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.7880.0>: Starting to move bucket 628 INFO REPORT <0.110.0> 2011-01-03 13:23:28 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.5582.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.6783625730994152]], [['ns_1@10.2.1.101'| 0.40588235294117647]], [['ns_1@10.2.1.102'| 0.5425219941348973]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.7880.0> 2011-01-03 
INFO REPORT <0.7880.0> 2011-01-03 13:23:28
===============================================================================
vbucketmigrator<0.7880.0>: Bucket 628 moved to the next server
vbucketmigrator<0.7880.0>: Validate bucket states
vbucketmigrator<0.7880.0>: 628 ok

INFO REPORT <11993.5353.0> 2011-01-03 13:23:29
===============================================================================
vbucketmigrator<0.5353.0>: Bucket 68 moved to the next server
vbucketmigrator<0.5353.0>: Validate bucket states
vbucketmigrator<0.5353.0>: 68 ok

INFO REPORT <0.7904.0> 2011-01-03 13:23:29
===============================================================================
vbucketmigrator<0.7904.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7904.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7904.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7904.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7904.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7904.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7904.0>: Starting to move bucket 629

INFO REPORT <11993.5384.0> 2011-01-03 13:23:32
===============================================================================
vbucketmigrator<0.5384.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5384.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5384.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5384.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5384.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5384.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5384.0>: Starting to move bucket 69

INFO REPORT <0.7904.0> 2011-01-03 13:23:34
===============================================================================
vbucketmigrator<0.7904.0>: Bucket 629 moved to the next server
vbucketmigrator<0.7904.0>: Validate bucket states
vbucketmigrator<0.7904.0>: 629 ok

INFO REPORT <0.7926.0> 2011-01-03 13:23:35
===============================================================================
vbucketmigrator<0.7926.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7926.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7926.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7926.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7926.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7926.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7926.0>: Starting to move bucket 630

INFO REPORT <0.110.0> 2011-01-03 13:23:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state,<0.5582.0>,{dict,3,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[['ns_1@10.2.1.100'|0.6900584795321638]],[['ns_1@10.2.1.101'|0.4117647058823529]],[['ns_1@10.2.1.102'|0.5513196480938416]],[],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.7926.0> 2011-01-03 13:23:39
===============================================================================
vbucketmigrator<0.7926.0>: Bucket 630 moved to the next server
vbucketmigrator<0.7926.0>: Validate bucket states
vbucketmigrator<0.7926.0>: 630 ok
INFO REPORT <0.7944.0> 2011-01-03 13:23:40
===============================================================================
vbucketmigrator<0.7944.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7944.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7944.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7944.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7944.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7944.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7944.0>: Starting to move bucket 631

INFO REPORT <11993.5384.0> 2011-01-03 13:23:43
===============================================================================
vbucketmigrator<0.5384.0>: Bucket 69 moved to the next server
vbucketmigrator<0.5384.0>: Validate bucket states
vbucketmigrator<0.5384.0>: 69 ok

INFO REPORT <0.7944.0> 2011-01-03 13:23:44
===============================================================================
vbucketmigrator<0.7944.0>: Bucket 631 moved to the next server
vbucketmigrator<0.7944.0>: Validate bucket states
vbucketmigrator<0.7944.0>: 631 ok

INFO REPORT <11993.5413.0> 2011-01-03 13:23:44
===============================================================================
vbucketmigrator<0.5413.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5413.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5413.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5413.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5413.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5413.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5413.0>: Starting to move bucket 70

INFO REPORT <0.7957.0> 2011-01-03 13:23:45
===============================================================================
vbucketmigrator<0.7957.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7957.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7957.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7957.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7957.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7957.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7957.0>: Starting to move bucket 632

INFO REPORT <0.110.0> 2011-01-03 13:23:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state,<0.5582.0>,{dict,3,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[['ns_1@10.2.1.100'|0.7017543859649122]],[['ns_1@10.2.1.101'|0.41764705882352937]],[['ns_1@10.2.1.102'|0.5601173020527859]],[],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.7957.0> 2011-01-03 13:23:48
===============================================================================
vbucketmigrator<0.7957.0>: Bucket 632 moved to the next server
vbucketmigrator<0.7957.0>: Validate bucket states
vbucketmigrator<0.7957.0>: 632 ok
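[Annotation: each migration above follows the same sequence: connect and authenticate to source and destination, "Starting to move bucket N", then "Bucket N moved to the next server" and a state validation. A minimal sketch, with a regex and module name of our own, of extracting the completed vbucket ids from such log lines:]

%% Sketch: collect the vbucket ids whose moves completed, from lines
%% like "vbucketmigrator<0.7944.0>: Bucket 631 moved to the next server".
-module(migrator_scan).
-export([completed/1]).

completed(Lines) ->
    lists:foldl(fun(Line, Acc) ->
        case re:run(Line, "Bucket (\\d+) moved to the next server",
                    [{capture, all_but_first, list}]) of
            {match, [Id]} -> [list_to_integer(Id) | Acc];
            nomatch       -> Acc
        end
    end, [], Lines).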
INFO REPORT <0.7976.0> 2011-01-03 13:23:49
===============================================================================
vbucketmigrator<0.7976.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7976.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7976.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7976.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7976.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7976.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7976.0>: Starting to move bucket 633

INFO REPORT <0.85.0> 2011-01-03 13:23:53
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.7976.0> 2011-01-03 13:23:54
===============================================================================
vbucketmigrator<0.7976.0>: Bucket 633 moved to the next server
vbucketmigrator<0.7976.0>: Validate bucket states
vbucketmigrator<0.7976.0>: 633 ok

INFO REPORT <11993.5413.0> 2011-01-03 13:23:54
===============================================================================
vbucketmigrator<0.5413.0>: Bucket 70 moved to the next server
vbucketmigrator<0.5413.0>: Validate bucket states
vbucketmigrator<0.5413.0>: 70 ok

INFO REPORT <0.7997.0> 2011-01-03 13:23:55
===============================================================================
vbucketmigrator<0.7997.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7997.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7997.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7997.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.7997.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7997.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.7997.0>: Starting to move bucket 634

INFO REPORT <11993.5442.0> 2011-01-03 13:23:55
===============================================================================
vbucketmigrator<0.5442.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5442.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5442.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5442.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5442.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5442.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5442.0>: Starting to move bucket 71

INFO REPORT <0.110.0> 2011-01-03 13:23:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state,<0.5582.0>,{dict,3,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[['ns_1@10.2.1.100'|0.7192982456140351]],[['ns_1@10.2.1.101'|0.41764705882352937]],[['ns_1@10.2.1.102'|0.5689149560117301]],[],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.7997.0> 2011-01-03 13:23:59
===============================================================================
vbucketmigrator<0.7997.0>: Bucket 634 moved to the next server
vbucketmigrator<0.7997.0>: Validate bucket states
vbucketmigrator<0.7997.0>: 634 ok

INFO REPORT <0.8014.0> 2011-01-03 13:24:00
===============================================================================
vbucketmigrator<0.8014.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8014.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8014.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8014.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8014.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8014.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8014.0>: Starting to move bucket 635
ERROR REPORT <11993.5442.0> 2011-01-03 13:24:02
===============================================================================
** Generic server <11993.5442.0> terminating
** Last message in was {'EXIT',<11993.5441.0>,stopped}
** When Server state == {state,#Port<11993.5393>,vbucketmigrator, {["Starting to move bucket 71", "Authenticated towards: {Sock 10.2.1.101:11210}"], ["Authenticating towards: {Sock 10.2.1.101:11210}"]}, undefined,[],0}
** Reason for termination ==
** stopped

CRASH REPORT <11993.5442.0> 2011-01-03 13:24:02
===============================================================================
Crashing process
  initial_call {ns_port_server,init,['Argument__1']}
  pid <11993.5442.0>
  registered_name []
  error_info {exit,stopped,[{gen_server,terminate,6},{proc_lib,init_p_do_apply,3}]}
  ancestors [<11993.5441.0>]
  messages [{'EXIT',#Port<11993.5393>,normal}]
  links []
  dictionary []
  trap_exit true
  status running
  heap_size 1597
  stack_size 24
  reductions 443

INFO REPORT <0.110.0> 2011-01-03 13:24:02
===============================================================================
ns_log: logging ns_orchestrator:2:Rebalance exited with reason stopped

ERROR REPORT <0.8014.0> 2011-01-03 13:24:02
===============================================================================
** Generic server <0.8014.0> terminating
** Last message in was {'EXIT',<0.8013.0>,stopped}
** When Server state == {state,#Port<0.6312>,vbucketmigrator, {["Starting to move bucket 635", "Authenticated towards: {Sock 10.2.1.100:11210}"], ["Authenticating towards: {Sock 10.2.1.100:11210}"]}, undefined,[],0}
** Reason for termination ==
** stopped

CRASH REPORT <0.8014.0> 2011-01-03 13:24:02
===============================================================================
Crashing process
  initial_call {ns_port_server,init,['Argument__1']}
  pid <0.8014.0>
  registered_name []
  error_info {exit,stopped,[{gen_server,terminate,6},{proc_lib,init_p_do_apply,3}]}
  ancestors [<0.8013.0>]
  messages [{'EXIT',#Port<0.6312>,normal}]
  links []
  dictionary []
  trap_exit true
  status running
  heap_size 1597
  stack_size 24
  reductions 443
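[Annotation: both crash reports show the same shutdown shape: an ns_port_server process that traps exits receives {'EXIT', Parent, stopped} when the rebalance is stopped, terminates with that reason, and leaves the port's normal {'EXIT', #Port<...>, normal} in its mailbox. A minimal OTP sketch of that shape, with module and function names of our own, not ns_port_server's actual code:]

%% Sketch: a gen_server that traps exits and stops when the process
%% that spawned it exits, mirroring the crash reports above.
-module(port_owner_sketch).
-behaviour(gen_server).
-export([start_link/0, init/1, handle_call/3, handle_cast/2,
         handle_info/2, terminate/2, code_change/3]).

start_link() -> gen_server:start_link(?MODULE, self(), []).

init(Parent) ->
    process_flag(trap_exit, true),   % like ns_port_server: trap_exit true
    {ok, Parent}.

handle_call(_Req, _From, S) -> {reply, ok, S}.
handle_cast(_Msg, S) -> {noreply, S}.

%% Parent died (e.g. the rebalance was stopped): stop with its reason,
%% which is what produces "** Reason for termination == ** stopped".
handle_info({'EXIT', Parent, Reason}, Parent) -> {stop, Reason, Parent};
handle_info(_Other, S) -> {noreply, S}.

terminate(_Reason, _S) -> ok.
code_change(_Old, S, _Extra) -> {ok, S}.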
INFO REPORT <0.65.0> 2011-01-03 13:24:03
===============================================================================
config change: rebalance_status -> {none,<<"Rebalance failed. See logs for detailed reason. You can try rebalance again.">>}

INFO REPORT <0.65.0> 2011-01-03 13:24:03
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:24:03
===============================================================================
Pushing config

INFO REPORT <0.85.0> 2011-01-03 13:24:03
===============================================================================
Pushing config done

INFO REPORT <0.85.0> 2011-01-03 13:24:05
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.93.0> 2011-01-03 13:24:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,89847,407401}},
   {active_buckets,["default"]},
   {memory,[{total,22341624},{processes,14343684},{processes_used,14328468},{system,7997940},{atom,560301},{atom_used,557531},{binary,300136},{code,4570913},{ets,1143676}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},{kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1859},
   {memory_data,{4284698624,4205457408,{<0.299.0>,2357452}}},
   {disk_data,[{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,146411520},{system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{1850396,0}},{context_switches,{584099,0}},{garbage_collection,{109429,1121613760,0}},{io,{{input,50917984},{output,26156921}}},{reductions,{502434572,2896462}},{run_queue,0},{runtime,{24772,156}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,89847,391400}},
   {active_buckets,["default"]},
   {memory,[{total,17545616},{processes,9477900},{processes_used,9468380},{system,8067716},{atom,559813},{atom_used,556363},{binary,439560},{code,4551541},{ets,1102428}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},{kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1789},
   {memory_data,{4284698624,4251324416,{<11993.387.0>,2357452}}},
   {disk_data,[{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,54812672},{system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{1780897,0}},{context_switches,{304536,0}},{garbage_collection,{77081,662148121,0}},{io,{{input,41351007},{output,19764140}}},{reductions,{192832423,2153585}},{run_queue,0},{runtime,{14632,202}}]}]},
 {'ns_1@10.2.1.102',
  [{last_heard,{1294,89848,156400}},
   {active_buckets,["default"]},
   {memory,[{total,13114960},{processes,5750132},{processes_used,5736708},{system,7364828},{atom,541077},{atom_used,528589},{binary,406104},{code,4280811},{ets,725524}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,778},
   {memory_data,{4284698624,2020220928,{<10870.218.0>,1086308}}},
   {disk_data,[{"C:\\",49423972,39},{"D:\\",52797620,0},{"G:\\",34724465,17}]},
   {replication,[{"default",1.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,2153156608},{system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{770317,0}},{context_switches,{83379,0}},{garbage_collection,{21398,101347594,0}},{io,{{input,11021434},{output,6786245}}},{reductions,{42175232,590508}},{run_queue,0},{runtime,{3806,62}}]}]}]
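[Annotation: each node's entry in the ns_doctor dump is a proplist; the keys below are as they appear in the dump, while the helper itself is ours. A minimal sketch of pulling the free-memory ratio out of one node's status; on 'ns_1@10.2.1.100' above it comes to roughly 146411520/4284698624, about 3% free.]

%% Sketch: free/total memory for one node's status proplist from the
%% ns_doctor "Current node statuses" dump above.
-module(doctor_sketch).
-export([free_ratio/1]).

free_ratio(Status) ->
    Mem   = proplists:get_value(system_memory_data, Status),
    Free  = proplists:get_value(free_memory, Mem),
    Total = proplists:get_value(total_memory, Mem),
    Free / Total.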
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08
===============================================================================
ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 0 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.

ERROR REPORT <0.8066.0> 2011-01-03 13:24:08
===============================================================================
ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 1 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.

ERROR REPORT <0.8066.0> 2011-01-03 13:24:08
===============================================================================
ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 2 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.

ERROR REPORT <0.8066.0> 2011-01-03 13:24:08
===============================================================================
ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 3 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.

ERROR REPORT <0.8066.0> 2011-01-03 13:24:08
===============================================================================
ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 4 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.

ERROR REPORT <0.8066.0> 2011-01-03 13:24:08
===============================================================================
ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 5 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.

ERROR REPORT <0.8066.0> 2011-01-03 13:24:08
===============================================================================
ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 6 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.

ERROR REPORT <0.8066.0> 2011-01-03 13:24:08
===============================================================================
ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 7 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.

ERROR REPORT <0.8066.0> 2011-01-03 13:24:08
===============================================================================
ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 8 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.

ERROR REPORT <0.8066.0> 2011-01-03 13:24:08
===============================================================================
ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 9 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.

ERROR REPORT <0.8066.0> 2011-01-03 13:24:08
===============================================================================
ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 10 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.

ERROR REPORT <0.8066.0> 2011-01-03 13:24:08
===============================================================================
ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 11 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 12 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 13 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 14 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 15 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 16 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 17 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 18 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 19 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 20 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 21 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 22 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 23 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 24 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 25 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 26 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 27 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 28 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 29 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 30 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 31 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 32 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 33 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 34 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 35 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 36 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 37 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 38 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 39 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 40 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 41 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 42 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 43 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 44 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 45 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 46 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 47 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 48 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 49 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 50 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 51 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 52 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 53 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 54 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 55 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 56 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 57 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 58 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 59 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 60 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 61 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 62 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 63 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 64 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 65 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 66 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 67 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 68 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 69 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 70 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. INFO REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:209: Deleting vbucket 71 in "default" on 'ns_1@10.2.1.102' ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 512 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 513 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 514 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 515 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 516 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 517 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 518 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 519 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 520 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 521 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 522 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 523 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 524 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 525 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 526 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 527 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 528 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 529 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 530 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 531 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 532 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 533 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 534 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 535 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 536 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 537 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 538 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 539 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 540 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 541 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 542 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 543 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 544 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 545 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 546 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 547 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 548 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 549 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 550 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 551 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 552 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 553 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 554 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 555 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 556 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 557 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 558 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 559 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 560 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 561 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 562 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 563 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 564 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 565 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 566 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 567 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 568 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 569 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 570 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 571 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 572 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 573 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 574 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 575 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 576 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 577 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 578 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 579 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 580 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 581 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 582 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 583 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 584 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 585 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 586 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 587 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 588 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 589 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 590 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 591 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 592 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 593 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 594 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 595 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 596 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 597 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 598 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 599 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 600 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 601 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 602 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 603 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 604 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 605 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 606 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 607 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 608 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 609 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 610 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 611 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 612 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 613 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 614 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 615 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 616 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 617 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 618 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 619 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 620 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 621 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 622 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 623 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 624 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 625 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 626 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 627 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 628 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 629 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 630 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 631 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 632 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 633 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master.
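The ns_janitor:161 records above repeat one fixed format per re-mastered vbucket (the run continues through vbucket 634 below). As a minimal sketch, assuming this one-record-per-line layout and an illustrative log file name, the re-masterings can be tallied per new master node:

    import re
    from collections import Counter

    # Matches the ns_janitor re-mastering records shown above.
    PATTERN = re.compile(
        r'ns_janitor:\d+: Master for vbucket (\d+) in "([^"]+)" is not active, '
        r"but '([^']+)' is, so making that the master\.")

    new_masters = Counter()
    with open("ns_server.log") as log:  # hypothetical input file
        for line in log:
            m = PATTERN.search(line)
            if m:
                node = m.group(3)  # the node that just became master
                new_masters[node] += 1

    for node, count in new_masters.most_common():
        print(f"{node}: {count} vbuckets re-mastered")

Run against the records above, this reports 'ns_1@10.2.1.102' as the new master for every re-assigned vbucket, which matches the map entries in the config change that follows.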
ERROR REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 634 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. INFO REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:209: Deleting vbucket 635 in "default" on 'ns_1@10.2.1.102' INFO REPORT <0.65.0> 2011-01-03 13:24:08 =============================================================================== config change: buckets -> [{configs,[{"default", [{num_replicas,1}, {ram_quota,3426746368}, {auth_type,sasl}, {sasl_password,[]}, {type,membase}, {num_vbuckets,1024}, {ht_size,3079}, {tap_keepalive,0}, {tap_noop_interval,20}, {max_txn_size,1000}, {ht_locks,5}, {servers,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']}, {map,[['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101'|...], [...]|...]}]}]}] INFO REPORT <0.8066.0> 2011-01-03 13:24:08 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets 
[1023,1022,1021,1020,1019,1018,1017,1016,1015,1014,1013,1012,1011,1010,1009,1008,1007,1006,1005,1004,1003,1002,1001,1000,999,998,997,996,995,994,993,992,991,990,989,988,987,986,985,984,983,982,981,980,979,978,977,976,975,974,973,972,971,970,969,968,967,966,965,964,963,962,961,960,959,958,957,956,955,954,953,952,951,950,949,948,947,946,945,944,943,942,941,940,939,938,937,936,935,934,933,932,931,930,929,928,927,926,925,924,923,922,921,920,919,918,917,916,915,914,913,912,911,910,909,908,907,906,905,904,903,902,901,900,899,898,897,896,895,894,893,892,891,890,889,888,887,886,885,884,883,882,881,880,879,878,877,876,875,874,873,872,871,870,869,868,867,866,865,864,863,862,861,860,859,858,857,856,855,854,853,852,851,850,849,848,847,846,845,844,843,842,841,840,839,838,837,836,835,834,833,832,831,830,829,828,827,826,825,824,823,822,821,820,819,818,817,816,815,814,813,812,811,810,809,808,807,806,805,804,803,802,801,800,799,798,797,796,795,794,793,792,791,790,789,788,787,786,785,784,783,782,781,780,779,778,777,776,775,774,773,772,771,770,769,768,767,766,765,764,763,762,761,760,759,758,757,756,755,754,753,752,751,750,749,748,747,746,745,744,743,742,741,740,739,738,737,736,735,734,733,732,731,730,729,728,727,726,725,724,723,722,721,720,719,718,717,716,715,714,713,712,711,710,709,708,707,706,705,704,703,702,701,700,699,698,697,696,695,694,693,692,691,690,689,688,687,686,685,684,683,682,681,680,679,678,677,676,675,674,673,672,671,670,669,668,667,666,665,664,663,662,661,660,659,658,657,656,655,654,653,652,651,650,649,648,647,646,645,644,643,642,641,640,639,638,637,636,635,634,633,632,631,630,629,628,627,626,625,624,623,622,621,620,619,618,617,616,615,614,613,612,611,610,609,608,607,606,605,604,603,602,601,600,599,598,597,596,595,594,593,592,591,590,589,588,587,586,585,584,583,582,581,580,579,578,577,576,575,574,573,572,571,570,569,568,567,566,565,564,563,562,561,560,559,558,557,556,555,554,553,552,551,550,549,548,547,546,545,544,543,542,541,540,539,538,537,536,535,534,533,532,531,530,529,528,527,526,525,524,523,522,521,520,519,518,517,516,515,514,513,512] in bucket "default" from node 'ns_1@10.2.1.100' to node 'ns_1@10.2.1.101' INFO REPORT <0.65.0> 2011-01-03 13:24:08 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 13:24:08 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 13:24:08 =============================================================================== Pushing config done INFO REPORT <0.8066.0> 2011-01-03 13:24:09 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:251: Args = [vbucketmigrator,"./bin/vbucketmigrator/vbucketmigrator", ["-e","-a","default","-h","10.2.1.100:11210","-d","10.2.1.101:11210","-A", "-v","-b","1023","-b","1022","-b","1021","-b","1020","-b","1019","-b", "1018","-b","1017","-b","1016","-b","1015","-b","1014","-b","1013","-b", "1012","-b","1011","-b","1010","-b","1009","-b","1008","-b","1007","-b", "1006","-b","1005","-b","1004","-b","1003","-b","1002","-b","1001","-b", "1000","-b","999","-b","998","-b","997","-b","996","-b","995","-b","994", "-b","993","-b","992","-b","991","-b","990","-b","989","-b","988","-b", "987","-b","986","-b","985","-b","984","-b","983","-b","982","-b","981", "-b","980","-b","979","-b","978","-b","977","-b","976","-b","975","-b", "974","-b","973","-b","972","-b","971","-b","970","-b","969","-b","968", 
"-b","967","-b","966","-b","965","-b","964","-b","963","-b","962","-b", "961","-b","960","-b","959","-b","958","-b","957","-b","956","-b","955", "-b","954","-b","953","-b","952","-b","951","-b","950","-b","949","-b", "948","-b","947","-b","946","-b","945","-b","944","-b","943","-b","942", "-b","941","-b","940","-b","939","-b","938","-b","937","-b","936","-b", "935","-b","934","-b","933","-b","932","-b","931","-b","930","-b","929", "-b","928","-b","927","-b","926","-b","925","-b","924","-b","923","-b", "922","-b","921","-b","920","-b","919","-b","918","-b","917","-b","916", "-b","915","-b","914","-b","913","-b","912","-b","911","-b","910","-b", "909","-b","908","-b","907","-b","906","-b","905","-b","904","-b","903", "-b","902","-b","901","-b","900","-b","899","-b","898","-b","897","-b", "896","-b","895","-b","894","-b","893","-b","892","-b","891","-b","890", "-b","889","-b","888","-b","887","-b","886","-b","885","-b","884","-b", "883","-b","882","-b","881","-b","880","-b","879","-b","878","-b","877", "-b","876","-b","875","-b","874","-b","873","-b","872","-b","871","-b", "870","-b","869","-b","868","-b","867","-b","866","-b","865","-b","864", "-b","863","-b","862","-b","861","-b","860","-b","859","-b","858","-b", "857","-b","856","-b","855","-b","854","-b","853","-b","852","-b","851", "-b","850","-b","849","-b","848","-b","847","-b","846","-b","845","-b", "844","-b","843","-b","842","-b","841","-b","840","-b","839","-b","838", "-b","837","-b","836","-b","835","-b","834","-b","833","-b","832","-b", "831","-b","830","-b","829","-b","828","-b","827","-b","826","-b","825", "-b","824","-b","823","-b","822","-b","821","-b","820","-b","819","-b", "818","-b","817","-b","816","-b","815","-b","814","-b","813","-b","812", "-b","811","-b","810","-b","809","-b","808","-b","807","-b","806","-b", "805","-b","804","-b","803","-b","802","-b","801","-b","800","-b","799", "-b","798","-b","797","-b","796","-b","795","-b","794","-b","793","-b", "792","-b","791","-b","790","-b","789","-b","788","-b","787","-b","786", "-b","785","-b","784","-b","783","-b","782","-b","781","-b","780","-b", "779","-b","778","-b","777","-b","776","-b","775","-b","774","-b","773", "-b","772","-b","771","-b","770","-b","769","-b","768","-b","767","-b", "766","-b","765","-b","764","-b","763","-b","762","-b","761","-b","760", "-b","759","-b","758","-b","757","-b","756","-b","755","-b","754","-b", "753","-b","752","-b","751","-b","750","-b","749","-b","748","-b","747", "-b","746","-b","745","-b","744","-b","743","-b","742","-b","741","-b", "740","-b","739","-b","738","-b","737","-b","736","-b","735","-b","734", "-b","733","-b","732","-b","731","-b","730","-b","729","-b","728","-b", "727","-b","726","-b","725","-b","724","-b","723","-b","722","-b","721", "-b","720","-b","719","-b","718","-b","717","-b","716","-b","715","-b", "714","-b","713","-b","712","-b","711","-b","710","-b","709","-b","708", "-b","707","-b","706","-b","705","-b","704","-b","703","-b","702","-b", "701","-b","700","-b","699","-b","698","-b","697","-b","696","-b","695", "-b","694","-b","693","-b","692","-b","691","-b","690","-b","689","-b", "688","-b","687","-b","686","-b","685","-b","684","-b","683","-b","682", "-b","681","-b","680","-b","679","-b","678","-b","677","-b","676","-b", "675","-b","674","-b","673","-b","672","-b","671","-b","670","-b","669", "-b","668","-b","667","-b","666","-b","665","-b","664","-b","663","-b", "662","-b","661","-b","660","-b","659","-b","658","-b","657","-b","656", "-b","655","-b","654","-b","653","-b","652","-b","651","-b","650","-b", 
"649","-b","648","-b","647","-b","646","-b","645","-b","644","-b","643", "-b","642","-b","641","-b","640","-b","639","-b","638","-b","637","-b", "636","-b","635","-b","634","-b","633","-b","632","-b","631","-b","630", "-b","629","-b","628","-b","627","-b","626","-b","625","-b","624","-b", "623","-b","622","-b","621","-b","620","-b","619","-b","618","-b","617", "-b","616","-b","615","-b","614","-b","613","-b","612","-b","611","-b", "610","-b","609","-b","608","-b","607","-b","606","-b","605","-b","604", "-b","603","-b","602","-b","601","-b","600","-b","599","-b","598","-b", "597","-b","596","-b","595","-b","594","-b","593","-b","592","-b","591", "-b","590","-b","589","-b","588","-b","587","-b","586","-b","585","-b", "584","-b","583","-b","582","-b","581","-b","580","-b","579","-b","578", "-b","577","-b","576","-b","575","-b","574","-b","573","-b","572","-b", "571","-b","570","-b","569","-b","568","-b","567","-b","566","-b","565", "-b","564","-b","563","-b","562","-b","561","-b","560","-b","559","-b", "558","-b","557","-b","556","-b","555","-b","554","-b","553","-b","552", "-b","551","-b","550","-b","549","-b","548","-b","547","-b","546","-b", "545","-b","544","-b","543","-b","542","-b","541","-b","540","-b","539", "-b","538","-b","537","-b","536","-b","535","-b","534","-b","533","-b", "532","-b","531","-b","530","-b","529","-b","528","-b","527","-b","526", "-b","525","-b","524","-b","523","-b","522","-b","521","-b","520","-b", "519","-b","518","-b","517","-b","516","-b","515","-b","514","-b","513", "-b","512"], [use_stdio,stderr_to_stdout,{write_data,[[],"\n"]}]] PROGRESS REPORT <0.260.0> 2011-01-03 13:24:09 =============================================================================== supervisor {local,'ns_vbm_sup-default'} started [{pid,<0.8085.0>}, {name,{child_id,[1023,1022,1021,1020,1019,1018,1017,1016,1015,1014, 1013,1012,1011,1010,1009,1008,1007,1006,1005,1004, 1003,1002,1001,1000,999,998,997,996,995,994,993, 992,991,990,989,988,987,986,985,984,983,982,981, 980,979,978,977,976,975,974,973,972,971,970,969, 968,967,966,965,964,963,962,961,960,959,958,957, 956,955,954,953,952,951,950,949,948,947,946,945, 944,943,942,941,940,939,938,937,936,935,934,933, 932,931,930,929,928,927,926,925,924,923,922,921, 920,919,918,917,916,915,914,913,912,911,910,909, 908,907,906,905,904,903,902,901,900,899,898,897, 896,895,894,893,892,891,890,889,888,887,886,885, 884,883,882,881,880,879,878,877,876,875,874,873, 872,871,870,869,868,867,866,865,864,863,862,861, 860,859,858,857,856,855,854,853,852,851,850,849, 848,847,846,845,844,843,842,841,840,839,838,837, 836,835,834,833,832,831,830,829,828,827,826,825, 824,823,822,821,820,819,818,817,816,815,814,813, 812,811,810,809,808,807,806,805,804,803,802,801, 800,799,798,797,796,795,794,793,792,791,790,789, 788,787,786,785,784,783,782,781,780,779,778,777, 776,775,774,773,772,771,770,769,768,767,766,765, 764,763,762,761,760,759,758,757,756,755,754,753, 752,751,750,749,748,747,746,745,744,743,742,741, 740,739,738,737,736,735,734,733,732,731,730,729, 728,727,726,725,724,723,722,721,720,719,718,717, 716,715,714,713,712,711,710,709,708,707,706,705, 704,703,702,701,700,699,698,697,696,695,694,693, 692,691,690,689,688,687,686,685,684,683,682,681, 680,679,678,677,676,675,674,673,672,671,670,669, 668,667,666,665,664,663,662,661,660,659,658,657, 656,655,654,653,652,651,650,649,648,647,646,645, 644,643,642,641,640,639,638,637,636,635,634,633, 632,631,630,629,628,627,626,625,624,623,622,621, 620,619,618,617,616,615,614,613,612,611,610,609, 
608,607,606,605,604,603,602,601,600,599,598,597, 596,595,594,593,592,591,590,589,588,587,586,585, 584,583,582,581,580,579,578,577,576,575,574,573, 572,571,570,569,568,567,566,565,564,563,562,561, 560,559,558,557,556,555,554,553,552,551,550,549, 548,547,546,545,544,543,542,541,540,539,538,537, 536,535,534,533,532,531,530,529,528,527,526,525, 524,523,522,521,520,519,518,517,516,515,514,513, 512], 'ns_1@10.2.1.101'}}, {mfa,{ns_port_server,start_link, [vbucketmigrator, "./bin/vbucketmigrator/vbucketmigrator", ["-e","-a","default","-h","10.2.1.100:11210", "-d","10.2.1.101:11210","-A","-v","-b", "1023","-b","1022","-b","1021","-b","1020", "-b","1019","-b","1018","-b","1017","-b", "1016","-b","1015","-b","1014","-b","1013", "-b","1012","-b","1011","-b","1010","-b", "1009","-b","1008","-b","1007","-b","1006", "-b","1005","-b","1004","-b","1003","-b", "1002","-b","1001","-b","1000","-b","999", "-b","998","-b","997","-b","996","-b","995", "-b","994","-b","993","-b","992","-b","991", "-b","990","-b","989","-b","988","-b","987", "-b","986","-b","985","-b","984","-b","983", "-b","982","-b","981","-b","980","-b","979", "-b","978","-b","977","-b","976","-b","975", "-b","974","-b","973","-b","972","-b","971", "-b","970","-b","969","-b","968","-b","967", "-b","966","-b","965","-b","964","-b","963", "-b","962","-b","961","-b","960","-b","959", "-b","958","-b","957","-b","956","-b","955", "-b","954","-b","953","-b","952","-b","951", "-b","950","-b","949","-b","948","-b","947", "-b","946","-b","945","-b","944","-b","943", "-b","942","-b","941","-b","940","-b","939", "-b","938","-b","937","-b","936","-b","935", "-b","934","-b","933","-b","932","-b","931", "-b","930","-b","929","-b","928","-b","927", "-b","926","-b","925","-b","924","-b","923", "-b","922","-b","921","-b","920","-b","919", "-b","918","-b","917","-b","916","-b","915", "-b","914","-b","913","-b","912","-b","911", "-b","910","-b","909","-b","908","-b","907", "-b","906","-b","905","-b","904","-b","903", "-b","902","-b","901","-b","900","-b","899", "-b","898","-b","897","-b","896","-b","895", "-b","894","-b","893","-b","892","-b","891", "-b","890","-b","889","-b","888","-b","887", "-b","886","-b","885","-b","884","-b","883", "-b","882","-b","881","-b","880","-b","879", "-b","878","-b","877","-b","876","-b","875", "-b","874","-b","873","-b","872","-b","871", "-b","870","-b","869","-b","868","-b","867", "-b","866","-b","865","-b","864","-b","863", "-b","862","-b","861","-b","860","-b","859", "-b","858","-b","857","-b","856","-b","855", "-b","854","-b","853","-b","852","-b","851", "-b","850","-b","849","-b","848","-b","847", "-b","846","-b","845","-b","844","-b","843", "-b","842","-b","841","-b","840","-b","839", "-b","838","-b","837","-b","836","-b","835", "-b","834","-b","833","-b","832","-b","831", "-b","830","-b","829","-b","828","-b","827", "-b","826","-b","825","-b","824","-b","823", "-b","822","-b","821","-b","820","-b","819", "-b","818","-b","817","-b","816","-b","815", "-b","814","-b","813","-b","812","-b","811", "-b","810","-b","809","-b","808","-b","807", "-b","806","-b","805","-b","804","-b","803", "-b","802","-b","801","-b","800","-b","799", "-b","798","-b","797","-b","796","-b","795", "-b","794","-b","793","-b","792","-b","791", "-b","790","-b","789","-b","788","-b","787", "-b","786","-b","785","-b","784","-b","783", "-b","782","-b","781","-b","780","-b","779", "-b","778","-b","777","-b","776","-b","775", "-b","774","-b","773","-b","772","-b","771", "-b","770","-b","769","-b","768","-b","767", "-b","766","-b","765","-b","764","-b","763", 
"-b","762","-b","761","-b","760","-b","759", "-b","758","-b","757","-b","756","-b","755", "-b","754","-b","753","-b","752","-b","751", "-b","750","-b","749","-b","748","-b","747", "-b","746","-b","745","-b","744","-b","743", "-b","742","-b","741","-b","740","-b","739", "-b","738","-b","737","-b","736","-b","735", "-b","734","-b","733","-b","732","-b","731", "-b","730","-b","729","-b","728","-b","727", "-b","726","-b","725","-b","724","-b","723", "-b","722","-b","721","-b","720","-b","719", "-b","718","-b","717","-b","716","-b","715", "-b","714","-b","713","-b","712","-b","711", "-b","710","-b","709","-b","708","-b","707", "-b","706","-b","705","-b","704","-b","703", "-b","702","-b","701","-b","700","-b","699", "-b","698","-b","697","-b","696","-b","695", "-b","694","-b","693","-b","692","-b","691", "-b","690","-b","689","-b","688","-b","687", "-b","686","-b","685","-b","684","-b","683", "-b","682","-b","681","-b","680","-b","679", "-b","678","-b","677","-b","676","-b","675", "-b","674","-b","673","-b","672","-b","671", "-b","670","-b","669","-b","668","-b","667", "-b","666","-b","665","-b","664","-b","663", "-b","662","-b","661","-b","660","-b","659", "-b","658","-b","657","-b","656","-b","655", "-b","654","-b","653","-b","652","-b","651", "-b","650","-b","649","-b","648","-b","647", "-b","646","-b","645","-b","644","-b","643", "-b","642","-b","641","-b","640","-b","639", "-b","638","-b","637","-b","636","-b","635", "-b","634","-b","633","-b","632","-b","631", "-b","630","-b","629","-b","628","-b","627", "-b","626","-b","625","-b","624","-b","623", "-b","622","-b","621","-b","620","-b","619", "-b","618","-b","617","-b","616","-b","615", "-b","614","-b","613","-b","612","-b","611", "-b","610","-b","609","-b","608","-b","607", "-b","606","-b","605","-b","604","-b","603", "-b","602","-b","601","-b","600","-b","599", "-b","598","-b","597","-b","596","-b","595", "-b","594","-b","593","-b","592","-b","591", "-b","590","-b","589","-b","588","-b","587", "-b","586","-b","585","-b","584","-b","583", "-b","582","-b","581","-b","580","-b","579", "-b","578","-b","577","-b","576","-b","575", "-b","574","-b","573","-b","572","-b","571", "-b","570","-b","569","-b","568","-b","567", "-b","566","-b","565","-b","564","-b","563", "-b","562","-b","561","-b","560","-b","559", "-b","558","-b","557","-b","556","-b","555", "-b","554","-b","553","-b","552","-b","551", "-b","550","-b","549","-b","548","-b","547", "-b","546","-b","545","-b","544","-b","543", "-b","542","-b","541","-b","540","-b","539", "-b","538","-b","537","-b","536","-b","535", "-b","534","-b","533","-b","532","-b","531", "-b","530","-b","529","-b","528","-b","527", "-b","526","-b","525","-b","524","-b","523", "-b","522","-b","521","-b","520","-b","519", "-b","518","-b","517","-b","516","-b","515", "-b","514","-b","513","-b","512"], [use_stdio,stderr_to_stdout, {write_data,[[],"\n"]}]]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] INFO REPORT <0.8066.0> 2011-01-03 13:24:09 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets 
[511,510,509,508,507,506,505,504,503,502,501,500,499,498,497,496,495,494,493,492,491,490,489,488,487,486,485,484,483,482,481,480,479,478,477,476,475,474,473,472,471,470,469,468,467,466,465,464,463,462,461,460,459,458,457,456,455,454,453,452,451,450,449,448,447,446,445,444,443,442,441,440,439,438,437,436,435,434,433,432,431,430,429,428,427,426,425,424,423,422,421,420,419,418,417,416,415,414,413,412,411,410,409,408,407,406,405,404,403,402,401,400,399,398,397,396,395,394,393,392,391,390,389,388,387,386,385,384,383,382,381,380,379,378,377,376,375,374,373,372,371,370,369,368,367,366,365,364,363,362,361,360,359,358,357,356,355,354,353,352,351,350,349,348,347,346,345,344,343,342,341,340,339,338,337,336,335,334,333,332,331,330,329,328,327,326,325,324,323,322,321,320,319,318,317,316,315,314,313,312,311,310,309,308,307,306,305,304,303,302,301,300,299,298,297,296,295,294,293,292,291,290,289,288,287,286,285,284,283,282,281,280,279,278,277,276,275,274,273,272,271,270,269,268,267,266,265,264,263,262,261,260,259,258,257,256,255,254,253,252,251,250,249,248,247,246,245,244,243,242,241,240,239,238,237,236,235,234,233,232,231,230,229,228,227,226,225,224,223,222,221,220,219,218,217,216,215,214,213,212,211,210,209,208,207,206,205,204,203,202,201,200,199,198,197,196,195,194,193,192,191,190,189,188,187,186,185,184,183,182,181,180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165,164,163,162,161,160,159,158,157,156,155,154,153,152,151,150,149,148,147,146,145,144,143,142,141,140,139,138,137,136,135,134,133,132,131,130,129,128,127,126,125,124,123,122,121,120,119,118,117,116,115,114,113,112,111,110,109,108,107,106,105,104,103,102,101,100,99,98,97,96,95,94,93,92,91,90,89,88,87,86,85,84,83,82,81,80,79,78,77,76,75,74,73,72,71,70,69,68,67,66,65,64,63,62,61,60,59,58,57,56,55,54,53,52,51,50,49,48,47,46,45,44,43,42,41,40,39,38,37,36,35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0] in bucket "default" from node 'ns_1@10.2.1.101' to node 'ns_1@10.2.1.100' INFO REPORT <0.8066.0> 2011-01-03 13:24:09 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:251: Args = [vbucketmigrator,"./bin/vbucketmigrator/vbucketmigrator", ["-e","-a","default","-h","10.2.1.101:11210","-d","10.2.1.100:11210","-A", "-v","-b","511","-b","510","-b","509","-b","508","-b","507","-b","506","-b", "505","-b","504","-b","503","-b","502","-b","501","-b","500","-b","499", "-b","498","-b","497","-b","496","-b","495","-b","494","-b","493","-b", "492","-b","491","-b","490","-b","489","-b","488","-b","487","-b","486", "-b","485","-b","484","-b","483","-b","482","-b","481","-b","480","-b", "479","-b","478","-b","477","-b","476","-b","475","-b","474","-b","473", "-b","472","-b","471","-b","470","-b","469","-b","468","-b","467","-b", "466","-b","465","-b","464","-b","463","-b","462","-b","461","-b","460", "-b","459","-b","458","-b","457","-b","456","-b","455","-b","454","-b", "453","-b","452","-b","451","-b","450","-b","449","-b","448","-b","447", "-b","446","-b","445","-b","444","-b","443","-b","442","-b","441","-b", "440","-b","439","-b","438","-b","437","-b","436","-b","435","-b","434", "-b","433","-b","432","-b","431","-b","430","-b","429","-b","428","-b", "427","-b","426","-b","425","-b","424","-b","423","-b","422","-b","421", "-b","420","-b","419","-b","418","-b","417","-b","416","-b","415","-b", "414","-b","413","-b","412","-b","411","-b","410","-b","409","-b","408", "-b","407","-b","406","-b","405","-b","404","-b","403","-b","402","-b", 
"401","-b","400","-b","399","-b","398","-b","397","-b","396","-b","395", "-b","394","-b","393","-b","392","-b","391","-b","390","-b","389","-b", "388","-b","387","-b","386","-b","385","-b","384","-b","383","-b","382", "-b","381","-b","380","-b","379","-b","378","-b","377","-b","376","-b", "375","-b","374","-b","373","-b","372","-b","371","-b","370","-b","369", "-b","368","-b","367","-b","366","-b","365","-b","364","-b","363","-b", "362","-b","361","-b","360","-b","359","-b","358","-b","357","-b","356", "-b","355","-b","354","-b","353","-b","352","-b","351","-b","350","-b", "349","-b","348","-b","347","-b","346","-b","345","-b","344","-b","343", "-b","342","-b","341","-b","340","-b","339","-b","338","-b","337","-b", "336","-b","335","-b","334","-b","333","-b","332","-b","331","-b","330", "-b","329","-b","328","-b","327","-b","326","-b","325","-b","324","-b", "323","-b","322","-b","321","-b","320","-b","319","-b","318","-b","317", "-b","316","-b","315","-b","314","-b","313","-b","312","-b","311","-b", "310","-b","309","-b","308","-b","307","-b","306","-b","305","-b","304", "-b","303","-b","302","-b","301","-b","300","-b","299","-b","298","-b", "297","-b","296","-b","295","-b","294","-b","293","-b","292","-b","291", "-b","290","-b","289","-b","288","-b","287","-b","286","-b","285","-b", "284","-b","283","-b","282","-b","281","-b","280","-b","279","-b","278", "-b","277","-b","276","-b","275","-b","274","-b","273","-b","272","-b", "271","-b","270","-b","269","-b","268","-b","267","-b","266","-b","265", "-b","264","-b","263","-b","262","-b","261","-b","260","-b","259","-b", "258","-b","257","-b","256","-b","255","-b","254","-b","253","-b","252", "-b","251","-b","250","-b","249","-b","248","-b","247","-b","246","-b", "245","-b","244","-b","243","-b","242","-b","241","-b","240","-b","239", "-b","238","-b","237","-b","236","-b","235","-b","234","-b","233","-b", "232","-b","231","-b","230","-b","229","-b","228","-b","227","-b","226", "-b","225","-b","224","-b","223","-b","222","-b","221","-b","220","-b", "219","-b","218","-b","217","-b","216","-b","215","-b","214","-b","213", "-b","212","-b","211","-b","210","-b","209","-b","208","-b","207","-b", "206","-b","205","-b","204","-b","203","-b","202","-b","201","-b","200", "-b","199","-b","198","-b","197","-b","196","-b","195","-b","194","-b", "193","-b","192","-b","191","-b","190","-b","189","-b","188","-b","187", "-b","186","-b","185","-b","184","-b","183","-b","182","-b","181","-b", "180","-b","179","-b","178","-b","177","-b","176","-b","175","-b","174", "-b","173","-b","172","-b","171","-b","170","-b","169","-b","168","-b", "167","-b","166","-b","165","-b","164","-b","163","-b","162","-b","161", "-b","160","-b","159","-b","158","-b","157","-b","156","-b","155","-b", "154","-b","153","-b","152","-b","151","-b","150","-b","149","-b","148", "-b","147","-b","146","-b","145","-b","144","-b","143","-b","142","-b", "141","-b","140","-b","139","-b","138","-b","137","-b","136","-b","135", "-b","134","-b","133","-b","132","-b","131","-b","130","-b","129","-b", "128","-b","127","-b","126","-b","125","-b","124","-b","123","-b","122", "-b","121","-b","120","-b","119","-b","118","-b","117","-b","116","-b", "115","-b","114","-b","113","-b","112","-b","111","-b","110","-b","109", "-b","108","-b","107","-b","106","-b","105","-b","104","-b","103","-b", "102","-b","101","-b","100","-b","99","-b","98","-b","97","-b","96","-b", "95","-b","94","-b","93","-b","92","-b","91","-b","90","-b","89","-b","88", 
"-b","87","-b","86","-b","85","-b","84","-b","83","-b","82","-b","81","-b", "80","-b","79","-b","78","-b","77","-b","76","-b","75","-b","74","-b","73", "-b","72","-b","71","-b","70","-b","69","-b","68","-b","67","-b","66","-b", "65","-b","64","-b","63","-b","62","-b","61","-b","60","-b","59","-b","58", "-b","57","-b","56","-b","55","-b","54","-b","53","-b","52","-b","51","-b", "50","-b","49","-b","48","-b","47","-b","46","-b","45","-b","44","-b","43", "-b","42","-b","41","-b","40","-b","39","-b","38","-b","37","-b","36","-b", "35","-b","34","-b","33","-b","32","-b","31","-b","30","-b","29","-b","28", "-b","27","-b","26","-b","25","-b","24","-b","23","-b","22","-b","21","-b", "20","-b","19","-b","18","-b","17","-b","16","-b","15","-b","14","-b","13", "-b","12","-b","11","-b","10","-b","9","-b","8","-b","7","-b","6","-b","5", "-b","4","-b","3","-b","2","-b","1","-b","0"], [use_stdio,stderr_to_stdout,{write_data,[[],"\n"]}]] INFO REPORT <0.8085.0> 2011-01-03 13:24:10 =============================================================================== vbucketmigrator<0.8085.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.8085.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.8085.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.8085.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.8085.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.8085.0>: Authenticated towards: {Sock 10.2.1.100:11210} INFO REPORT <0.105.0> 2011-01-03 13:24:10 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_638 for 1.00 secs INFO REPORT <0.259.0> 2011-01-03 13:24:11 =============================================================================== ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default": auth_cmds 640 auth_errors 0 bucket_conns 63 bytes_read 5774662140 bytes_written 151209809 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 3589416 conn_yields 217 connection_structures 235 curr_connections 85 curr_items 2464878 curr_items_tot 4012858 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 3559 ep_commit_time 0 ep_commit_time_total 1211 ep_data_age 149 ep_data_age_highwat 495 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 233 ep_flush_duration_highwat 297 ep_flush_duration_total 1518 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 478571 ep_io_num_read 484890 ep_io_num_write 3442124 ep_io_read_bytes 682339408 ep_io_write_bytes 4590144811 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 2353489022 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 737550 ep_num_eject_failures 1689334 ep_num_eject_replicas 871116 ep_num_expiry_pager_runs 0 ep_num_non_resident 1907955 ep_num_not_my_vbuckets 554265 ep_num_pager_runs 6 ep_num_value_ejects 1909242 ep_oom_errors 0 ep_overhead 63058871 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 95589 ep_storage_age 149 ep_storage_age_highwat 492 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 482378 
ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 5000915246 ep_total_del_items 0 ep_total_enqueued 4016384 ep_total_new_items 3439841 ep_total_persisted 3442124 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2416547893 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 638 tap_mutation_received 1575031 tap_mutation_sent 3008239 tap_opaque_received 1026 tap_opaque_sent 1662 tap_vbucket_set_sent 1271 threads 4 time 1294089851 total_connections 1284 uptime 1865 version 1.4.4_304_g7d5a132
INFO REPORT <0.105.0> 2011-01-03 13:24:12 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_638 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:24:13 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_638 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:24:14 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_638 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:24:16 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_638 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:24:17 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_638 for 1.00 secs
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 0 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 1 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 2 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 3 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 4 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 5 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 6 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 7 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 8 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 9 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 10 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 11 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 12 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 13 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 14 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 15 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 16 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 17 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 18 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 19 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 20 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 21 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 22 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 23 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 24 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 25 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 26 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 27 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 28 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 29 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 30 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 31 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 32 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 33 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 34 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 35 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 36 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 37 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 38 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 39 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 40 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 41 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 42 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 43 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 44 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 45 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 46 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 47 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 48 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 49 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 50 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 51 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 52 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 53 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 54 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 55 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 56 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 57 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 58 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 59 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 60 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 61 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 62 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 63 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 64 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 65 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 66 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 67 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 68 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 69 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 70 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 512 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 513 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 514 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 515 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 516 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 517 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 518 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 519 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 520 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 521 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 522 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 523 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 524 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 525 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 526 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 527 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 528 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 529 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 530 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 531 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 532 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 533 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 534 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 535 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 536 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 537 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 538 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 539 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 540 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 541 from replica to dead because we don't have all copies
INFO REPORT <0.8105.0> 2011-01-03 13:24:18
=============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 542 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 543 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 544 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 545 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 546 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 547 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 548 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 549 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 550 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 551 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 552 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 553 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 554 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 
'ns_1@10.2.1.101' in "default" on 555 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 556 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 557 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 558 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 559 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 560 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 561 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 562 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 563 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 564 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 565 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 566 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 567 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 568 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 
=============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 569 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 570 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 571 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 572 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 573 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 574 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 575 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 576 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 577 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 578 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 579 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 580 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 581 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 
'ns_1@10.2.1.101' in "default" on 582 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 583 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 584 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 585 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 586 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 587 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 588 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 589 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 590 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 591 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 592 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 593 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 594 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 595 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 
=============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 596 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 597 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 598 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 599 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 600 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 601 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 602 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 603 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 604 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 605 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 606 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 607 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 608 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 
'ns_1@10.2.1.101' in "default" on 609 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 610 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 611 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 612 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 613 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 614 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 615 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 616 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 617 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 618 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 619 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 620 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 621 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 622 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 
=============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 623 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 624 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 625 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 626 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 627 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 628 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 629 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 630 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 631 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 632 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 633 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 634 from replica to dead because we don't have all copies INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:180: Killing replicator {child_id, [1023,1022,1021,1020,1019, 1018,1017,1016,1015,1014, 1013,1012,1011,1010,1009, 1008,1007,1006,1005,1004, 1003,1002,1001,1000,999, 998,997,996,995,994,993, 992,991,990,989,988,987, 986,985,984,983,982,981, 980,979,978,977,976,975, 974,973,972,971,970,969, 
968,967,966,965,964,963, 962,961,960,959,958,957, 956,955,954,953,952,951, 950,949,948,947,946,945, 944,943,942,941,940,939, 938,937,936,935,934,933, 932,931,930,929,928,927, 926,925,924,923,922,921, 920,919,918,917,916,915, 914,913,912,911,910,909, 908,907,906,905,904,903, 902,901,900,899,898,897, 896,895,894,893,892,891, 890,889,888,887,886,885, 884,883,882,881,880,879, 878,877,876,875,874,873, 872,871,870,869,868,867, 866,865,864,863,862,861, 860,859,858,857,856,855, 854,853,852,851,850,849, 848,847,846,845,844,843, 842,841,840,839,838,837, 836,835,834,833,832,831, 830,829,828,827,826,825, 824,823,822,821,820,819, 818,817,816,815,814,813, 812,811,810,809,808,807, 806,805,804,803,802,801, 800,799,798,797,796,795, 794,793,792,791,790,789, 788,787,786,785,784,783, 782,781,780,779,778,777, 776,775,774,773,772,771, 770,769,768,767,766,765, 764,763,762,761,760,759, 758,757,756,755,754,753, 752,751,750,749,748,747, 746,745,744,743,742,741, 740,739,738,737,736,735, 734,733,732,731,730,729, 728,727,726,725,724,723, 722,721,720,719,718,717, 716,715,714,713,712,711, 710,709,708,707,706,705, 704,703,702,701,700,699, 698,697,696,695,694,693, 692,691,690,689,688,687, 686,685,684,683,682,681, 680,679,678,677,676,675, 674,673,672,671,670,669, 668,667,666,665,664,663, 662,661,660,659,658,657, 656,655,654,653,652,651, 650,649,648,647,646,645, 644,643,642,641,640,639, 638,637,636,635,634,633, 632,631,630,629,628,627, 626,625,624,623,622,621, 620,619,618,617,616,615, 614,613,612,611,610,609, 608,607,606,605,604,603, 602,601,600,599,598,597, 596,595,594,593,592,591, 590,589,588,587,586,585, 584,583,582,581,580,579, 578,577,576,575,574,573, 572,571,570,569,568,567, 566,565,564,563,562,561, 560,559,558,557,556,555, 554,553,552,551,550,549, 548,547,546,545,544,543, 542,541,540,539,538,537, 536,535,534,533,532,531, 530,529,528,527,526,525, 524,523,522,521,520,519, 518,517,516,515,514,513, 512], 'ns_1@10.2.1.101'} on node 'ns_1@10.2.1.100' INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets 
[1023,1022,1021,1020,1019,1018,1017,1016,1015,1014,1013,1012,1011,1010,1009,1008,1007,1006,1005,1004,1003,1002,1001,1000,999,998,997,996,995,994,993,992,991,990,989,988,987,986,985,984,983,982,981,980,979,978,977,976,975,974,973,972,971,970,969,968,967,966,965,964,963,962,961,960,959,958,957,956,955,954,953,952,951,950,949,948,947,946,945,944,943,942,941,940,939,938,937,936,935,934,933,932,931,930,929,928,927,926,925,924,923,922,921,920,919,918,917,916,915,914,913,912,911,910,909,908,907,906,905,904,903,902,901,900,899,898,897,896,895,894,893,892,891,890,889,888,887,886,885,884,883,882,881,880,879,878,877,876,875,874,873,872,871,870,869,868,867,866,865,864,863,862,861,860,859,858,857,856,855,854,853,852,851,850,849,848,847,846,845,844,843,842,841,840,839,838,837,836,835,834,833,832,831,830,829,828,827,826,825,824,823,822,821,820,819,818,817,816,815,814,813,812,811,810,809,808,807,806,805,804,803,802,801,800,799,798,797,796,795,794,793,792,791,790,789,788,787,786,785,784,783,782,781,780,779,778,777,776,775,774,773,772,771,770,769,768,767,766,765,764,763,762,761,760,759,758,757,756,755,754,753,752,751,750,749,748,747,746,745,744,743,742,741,740,739,738,737,736,735,734,733,732,731,730,729,728,727,726,725,724,723,722,721,720,719,718,717,716,715,714,713,712,711,710,709,708,707,706,705,704,703,702,701,700,699,698,697,696,695,694,693,692,691,690,689,688,687,686,685,684,683,682,681,680,679,678,677,676,675,674,673,672,671,670,669,668,667,666,665,664,663,662,661,660,659,658,657,656,655,654,653,652,651,650,649,648,647,646,645,644,643,642,641,640,639,638,637,636,635] in bucket "default" from node 'ns_1@10.2.1.100' to node 'ns_1@10.2.1.101' INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:251: Args = [vbucketmigrator,"./bin/vbucketmigrator/vbucketmigrator", ["-e","-a","default","-h","10.2.1.100:11210","-d","10.2.1.101:11210","-A", "-v","-b","1023","-b","1022","-b","1021","-b","1020","-b","1019","-b", "1018","-b","1017","-b","1016","-b","1015","-b","1014","-b","1013","-b", "1012","-b","1011","-b","1010","-b","1009","-b","1008","-b","1007","-b", "1006","-b","1005","-b","1004","-b","1003","-b","1002","-b","1001","-b", "1000","-b","999","-b","998","-b","997","-b","996","-b","995","-b","994", "-b","993","-b","992","-b","991","-b","990","-b","989","-b","988","-b", "987","-b","986","-b","985","-b","984","-b","983","-b","982","-b","981", "-b","980","-b","979","-b","978","-b","977","-b","976","-b","975","-b", "974","-b","973","-b","972","-b","971","-b","970","-b","969","-b","968", "-b","967","-b","966","-b","965","-b","964","-b","963","-b","962","-b", "961","-b","960","-b","959","-b","958","-b","957","-b","956","-b","955", "-b","954","-b","953","-b","952","-b","951","-b","950","-b","949","-b", "948","-b","947","-b","946","-b","945","-b","944","-b","943","-b","942", "-b","941","-b","940","-b","939","-b","938","-b","937","-b","936","-b", "935","-b","934","-b","933","-b","932","-b","931","-b","930","-b","929", "-b","928","-b","927","-b","926","-b","925","-b","924","-b","923","-b", "922","-b","921","-b","920","-b","919","-b","918","-b","917","-b","916", "-b","915","-b","914","-b","913","-b","912","-b","911","-b","910","-b", "909","-b","908","-b","907","-b","906","-b","905","-b","904","-b","903", "-b","902","-b","901","-b","900","-b","899","-b","898","-b","897","-b", "896","-b","895","-b","894","-b","893","-b","892","-b","891","-b","890", "-b","889","-b","888","-b","887","-b","886","-b","885","-b","884","-b", 
"883","-b","882","-b","881","-b","880","-b","879","-b","878","-b","877", "-b","876","-b","875","-b","874","-b","873","-b","872","-b","871","-b", "870","-b","869","-b","868","-b","867","-b","866","-b","865","-b","864", "-b","863","-b","862","-b","861","-b","860","-b","859","-b","858","-b", "857","-b","856","-b","855","-b","854","-b","853","-b","852","-b","851", "-b","850","-b","849","-b","848","-b","847","-b","846","-b","845","-b", "844","-b","843","-b","842","-b","841","-b","840","-b","839","-b","838", "-b","837","-b","836","-b","835","-b","834","-b","833","-b","832","-b", "831","-b","830","-b","829","-b","828","-b","827","-b","826","-b","825", "-b","824","-b","823","-b","822","-b","821","-b","820","-b","819","-b", "818","-b","817","-b","816","-b","815","-b","814","-b","813","-b","812", "-b","811","-b","810","-b","809","-b","808","-b","807","-b","806","-b", "805","-b","804","-b","803","-b","802","-b","801","-b","800","-b","799", "-b","798","-b","797","-b","796","-b","795","-b","794","-b","793","-b", "792","-b","791","-b","790","-b","789","-b","788","-b","787","-b","786", "-b","785","-b","784","-b","783","-b","782","-b","781","-b","780","-b", "779","-b","778","-b","777","-b","776","-b","775","-b","774","-b","773", "-b","772","-b","771","-b","770","-b","769","-b","768","-b","767","-b", "766","-b","765","-b","764","-b","763","-b","762","-b","761","-b","760", "-b","759","-b","758","-b","757","-b","756","-b","755","-b","754","-b", "753","-b","752","-b","751","-b","750","-b","749","-b","748","-b","747", "-b","746","-b","745","-b","744","-b","743","-b","742","-b","741","-b", "740","-b","739","-b","738","-b","737","-b","736","-b","735","-b","734", "-b","733","-b","732","-b","731","-b","730","-b","729","-b","728","-b", "727","-b","726","-b","725","-b","724","-b","723","-b","722","-b","721", "-b","720","-b","719","-b","718","-b","717","-b","716","-b","715","-b", "714","-b","713","-b","712","-b","711","-b","710","-b","709","-b","708", "-b","707","-b","706","-b","705","-b","704","-b","703","-b","702","-b", "701","-b","700","-b","699","-b","698","-b","697","-b","696","-b","695", "-b","694","-b","693","-b","692","-b","691","-b","690","-b","689","-b", "688","-b","687","-b","686","-b","685","-b","684","-b","683","-b","682", "-b","681","-b","680","-b","679","-b","678","-b","677","-b","676","-b", "675","-b","674","-b","673","-b","672","-b","671","-b","670","-b","669", "-b","668","-b","667","-b","666","-b","665","-b","664","-b","663","-b", "662","-b","661","-b","660","-b","659","-b","658","-b","657","-b","656", "-b","655","-b","654","-b","653","-b","652","-b","651","-b","650","-b", "649","-b","648","-b","647","-b","646","-b","645","-b","644","-b","643", "-b","642","-b","641","-b","640","-b","639","-b","638","-b","637","-b", "636","-b","635"], [use_stdio,stderr_to_stdout,{write_data,[[],"\n"]}]] PROGRESS REPORT <0.260.0> 2011-01-03 13:24:18 =============================================================================== supervisor {local,'ns_vbm_sup-default'} started [{pid,<0.8116.0>}, {name,{child_id,[1023,1022,1021,1020,1019,1018,1017,1016,1015,1014, 1013,1012,1011,1010,1009,1008,1007,1006,1005,1004, 1003,1002,1001,1000,999,998,997,996,995,994,993, 992,991,990,989,988,987,986,985,984,983,982,981, 980,979,978,977,976,975,974,973,972,971,970,969, 968,967,966,965,964,963,962,961,960,959,958,957, 956,955,954,953,952,951,950,949,948,947,946,945, 944,943,942,941,940,939,938,937,936,935,934,933, 932,931,930,929,928,927,926,925,924,923,922,921, 920,919,918,917,916,915,914,913,912,911,910,909, 
908,907,906,905,904,903,902,901,900,899,898,897, 896,895,894,893,892,891,890,889,888,887,886,885, 884,883,882,881,880,879,878,877,876,875,874,873, 872,871,870,869,868,867,866,865,864,863,862,861, 860,859,858,857,856,855,854,853,852,851,850,849, 848,847,846,845,844,843,842,841,840,839,838,837, 836,835,834,833,832,831,830,829,828,827,826,825, 824,823,822,821,820,819,818,817,816,815,814,813, 812,811,810,809,808,807,806,805,804,803,802,801, 800,799,798,797,796,795,794,793,792,791,790,789, 788,787,786,785,784,783,782,781,780,779,778,777, 776,775,774,773,772,771,770,769,768,767,766,765, 764,763,762,761,760,759,758,757,756,755,754,753, 752,751,750,749,748,747,746,745,744,743,742,741, 740,739,738,737,736,735,734,733,732,731,730,729, 728,727,726,725,724,723,722,721,720,719,718,717, 716,715,714,713,712,711,710,709,708,707,706,705, 704,703,702,701,700,699,698,697,696,695,694,693, 692,691,690,689,688,687,686,685,684,683,682,681, 680,679,678,677,676,675,674,673,672,671,670,669, 668,667,666,665,664,663,662,661,660,659,658,657, 656,655,654,653,652,651,650,649,648,647,646,645, 644,643,642,641,640,639,638,637,636,635], 'ns_1@10.2.1.101'}}, {mfa,{ns_port_server,start_link, [vbucketmigrator, "./bin/vbucketmigrator/vbucketmigrator", ["-e","-a","default","-h","10.2.1.100:11210", "-d","10.2.1.101:11210","-A","-v","-b", "1023","-b","1022","-b","1021","-b","1020", "-b","1019","-b","1018","-b","1017","-b", "1016","-b","1015","-b","1014","-b","1013", "-b","1012","-b","1011","-b","1010","-b", "1009","-b","1008","-b","1007","-b","1006", "-b","1005","-b","1004","-b","1003","-b", "1002","-b","1001","-b","1000","-b","999", "-b","998","-b","997","-b","996","-b","995", "-b","994","-b","993","-b","992","-b","991", "-b","990","-b","989","-b","988","-b","987", "-b","986","-b","985","-b","984","-b","983", "-b","982","-b","981","-b","980","-b","979", "-b","978","-b","977","-b","976","-b","975", "-b","974","-b","973","-b","972","-b","971", "-b","970","-b","969","-b","968","-b","967", "-b","966","-b","965","-b","964","-b","963", "-b","962","-b","961","-b","960","-b","959", "-b","958","-b","957","-b","956","-b","955", "-b","954","-b","953","-b","952","-b","951", "-b","950","-b","949","-b","948","-b","947", "-b","946","-b","945","-b","944","-b","943", "-b","942","-b","941","-b","940","-b","939", "-b","938","-b","937","-b","936","-b","935", "-b","934","-b","933","-b","932","-b","931", "-b","930","-b","929","-b","928","-b","927", "-b","926","-b","925","-b","924","-b","923", "-b","922","-b","921","-b","920","-b","919", "-b","918","-b","917","-b","916","-b","915", "-b","914","-b","913","-b","912","-b","911", "-b","910","-b","909","-b","908","-b","907", "-b","906","-b","905","-b","904","-b","903", "-b","902","-b","901","-b","900","-b","899", "-b","898","-b","897","-b","896","-b","895", "-b","894","-b","893","-b","892","-b","891", "-b","890","-b","889","-b","888","-b","887", "-b","886","-b","885","-b","884","-b","883", "-b","882","-b","881","-b","880","-b","879", "-b","878","-b","877","-b","876","-b","875", "-b","874","-b","873","-b","872","-b","871", "-b","870","-b","869","-b","868","-b","867", "-b","866","-b","865","-b","864","-b","863", "-b","862","-b","861","-b","860","-b","859", "-b","858","-b","857","-b","856","-b","855", "-b","854","-b","853","-b","852","-b","851", "-b","850","-b","849","-b","848","-b","847", "-b","846","-b","845","-b","844","-b","843", "-b","842","-b","841","-b","840","-b","839", "-b","838","-b","837","-b","836","-b","835", "-b","834","-b","833","-b","832","-b","831", "-b","830","-b","829","-b","828","-b","827", 
"-b","826","-b","825","-b","824","-b","823", "-b","822","-b","821","-b","820","-b","819", "-b","818","-b","817","-b","816","-b","815", "-b","814","-b","813","-b","812","-b","811", "-b","810","-b","809","-b","808","-b","807", "-b","806","-b","805","-b","804","-b","803", "-b","802","-b","801","-b","800","-b","799", "-b","798","-b","797","-b","796","-b","795", "-b","794","-b","793","-b","792","-b","791", "-b","790","-b","789","-b","788","-b","787", "-b","786","-b","785","-b","784","-b","783", "-b","782","-b","781","-b","780","-b","779", "-b","778","-b","777","-b","776","-b","775", "-b","774","-b","773","-b","772","-b","771", "-b","770","-b","769","-b","768","-b","767", "-b","766","-b","765","-b","764","-b","763", "-b","762","-b","761","-b","760","-b","759", "-b","758","-b","757","-b","756","-b","755", "-b","754","-b","753","-b","752","-b","751", "-b","750","-b","749","-b","748","-b","747", "-b","746","-b","745","-b","744","-b","743", "-b","742","-b","741","-b","740","-b","739", "-b","738","-b","737","-b","736","-b","735", "-b","734","-b","733","-b","732","-b","731", "-b","730","-b","729","-b","728","-b","727", "-b","726","-b","725","-b","724","-b","723", "-b","722","-b","721","-b","720","-b","719", "-b","718","-b","717","-b","716","-b","715", "-b","714","-b","713","-b","712","-b","711", "-b","710","-b","709","-b","708","-b","707", "-b","706","-b","705","-b","704","-b","703", "-b","702","-b","701","-b","700","-b","699", "-b","698","-b","697","-b","696","-b","695", "-b","694","-b","693","-b","692","-b","691", "-b","690","-b","689","-b","688","-b","687", "-b","686","-b","685","-b","684","-b","683", "-b","682","-b","681","-b","680","-b","679", "-b","678","-b","677","-b","676","-b","675", "-b","674","-b","673","-b","672","-b","671", "-b","670","-b","669","-b","668","-b","667", "-b","666","-b","665","-b","664","-b","663", "-b","662","-b","661","-b","660","-b","659", "-b","658","-b","657","-b","656","-b","655", "-b","654","-b","653","-b","652","-b","651", "-b","650","-b","649","-b","648","-b","647", "-b","646","-b","645","-b","644","-b","643", "-b","642","-b","641","-b","640","-b","639", "-b","638","-b","637","-b","636","-b","635"], [use_stdio,stderr_to_stdout, {write_data,[[],"\n"]}]]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:180: Killing replicator {child_id, [511,510,509,508,507,506, 505,504,503,502,501,500, 499,498,497,496,495,494, 493,492,491,490,489,488, 487,486,485,484,483,482, 481,480,479,478,477,476, 475,474,473,472,471,470, 469,468,467,466,465,464, 463,462,461,460,459,458, 457,456,455,454,453,452, 451,450,449,448,447,446, 445,444,443,442,441,440, 439,438,437,436,435,434, 433,432,431,430,429,428, 427,426,425,424,423,422, 421,420,419,418,417,416, 415,414,413,412,411,410, 409,408,407,406,405,404, 403,402,401,400,399,398, 397,396,395,394,393,392, 391,390,389,388,387,386, 385,384,383,382,381,380, 379,378,377,376,375,374, 373,372,371,370,369,368, 367,366,365,364,363,362, 361,360,359,358,357,356, 355,354,353,352,351,350, 349,348,347,346,345,344, 343,342,341,340,339,338, 337,336,335,334,333,332, 331,330,329,328,327,326, 325,324,323,322,321,320, 319,318,317,316,315,314, 313,312,311,310,309,308, 307,306,305,304,303,302, 301,300,299,298,297,296, 295,294,293,292,291,290, 289,288,287,286,285,284, 283,282,281,280,279,278, 277,276,275,274,273,272, 271,270,269,268,267,266, 265,264,263,262,261,260, 259,258,257,256,255,254, 
253,252,251,250,249,248, 247,246,245,244,243,242, 241,240,239,238,237,236, 235,234,233,232,231,230, 229,228,227,226,225,224, 223,222,221,220,219,218, 217,216,215,214,213,212, 211,210,209,208,207,206, 205,204,203,202,201,200, 199,198,197,196,195,194, 193,192,191,190,189,188, 187,186,185,184,183,182, 181,180,179,178,177,176, 175,174,173,172,171,170, 169,168,167,166,165,164, 163,162,161,160,159,158, 157,156,155,154,153,152, 151,150,149,148,147,146, 145,144,143,142,141,140, 139,138,137,136,135,134, 133,132,131,130,129,128, 127,126,125,124,123,122, 121,120,119,118,117,116, 115,114,113,112,111,110, 109,108,107,106,105,104, 103,102,101,100,99,98,97, 96,95,94,93,92,91,90,89, 88,87,86,85,84,83,82,81, 80,79,78,77,76,75,74,73, 72,71,70,69,68,67,66,65, 64,63,62,61,60,59,58,57, 56,55,54,53,52,51,50,49, 48,47,46,45,44,43,42,41, 40,39,38,37,36,35,34,33, 32,31,30,29,28,27,26,25, 24,23,22,21,20,19,18,17, 16,15,14,13,12,11,10,9,8, 7,6,5,4,3,2,1,0], 'ns_1@10.2.1.100'} on node 'ns_1@10.2.1.101' INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets [511,510,509,508,507,506,505,504,503,502,501,500,499,498,497,496,495,494,493,492,491,490,489,488,487,486,485,484,483,482,481,480,479,478,477,476,475,474,473,472,471,470,469,468,467,466,465,464,463,462,461,460,459,458,457,456,455,454,453,452,451,450,449,448,447,446,445,444,443,442,441,440,439,438,437,436,435,434,433,432,431,430,429,428,427,426,425,424,423,422,421,420,419,418,417,416,415,414,413,412,411,410,409,408,407,406,405,404,403,402,401,400,399,398,397,396,395,394,393,392,391,390,389,388,387,386,385,384,383,382,381,380,379,378,377,376,375,374,373,372,371,370,369,368,367,366,365,364,363,362,361,360,359,358,357,356,355,354,353,352,351,350,349,348,347,346,345,344,343,342,341,340,339,338,337,336,335,334,333,332,331,330,329,328,327,326,325,324,323,322,321,320,319,318,317,316,315,314,313,312,311,310,309,308,307,306,305,304,303,302,301,300,299,298,297,296,295,294,293,292,291,290,289,288,287,286,285,284,283,282,281,280,279,278,277,276,275,274,273,272,271,270,269,268,267,266,265,264,263,262,261,260,259,258,257,256,255,254,253,252,251,250,249,248,247,246,245,244,243,242,241,240,239,238,237,236,235,234,233,232,231,230,229,228,227,226,225,224,223,222,221,220,219,218,217,216,215,214,213,212,211,210,209,208,207,206,205,204,203,202,201,200,199,198,197,196,195,194,193,192,191,190,189,188,187,186,185,184,183,182,181,180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165,164,163,162,161,160,159,158,157,156,155,154,153,152,151,150,149,148,147,146,145,144,143,142,141,140,139,138,137,136,135,134,133,132,131,130,129,128,127,126,125,124,123,122,121,120,119,118,117,116,115,114,113,112,111,110,109,108,107,106,105,104,103,102,101,100,99,98,97,96,95,94,93,92,91,90,89,88,87,86,85,84,83,82,81,80,79,78,77,76,75,74,73,72,71] in bucket "default" from node 'ns_1@10.2.1.101' to node 'ns_1@10.2.1.100' INFO REPORT <0.8105.0> 2011-01-03 13:24:18 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:251: Args = [vbucketmigrator,"./bin/vbucketmigrator/vbucketmigrator", ["-e","-a","default","-h","10.2.1.101:11210","-d","10.2.1.100:11210","-A", "-v","-b","511","-b","510","-b","509","-b","508","-b","507","-b","506","-b", "505","-b","504","-b","503","-b","502","-b","501","-b","500","-b","499", "-b","498","-b","497","-b","496","-b","495","-b","494","-b","493","-b", 
"492","-b","491","-b","490","-b","489","-b","488","-b","487","-b","486", "-b","485","-b","484","-b","483","-b","482","-b","481","-b","480","-b", "479","-b","478","-b","477","-b","476","-b","475","-b","474","-b","473", "-b","472","-b","471","-b","470","-b","469","-b","468","-b","467","-b", "466","-b","465","-b","464","-b","463","-b","462","-b","461","-b","460", "-b","459","-b","458","-b","457","-b","456","-b","455","-b","454","-b", "453","-b","452","-b","451","-b","450","-b","449","-b","448","-b","447", "-b","446","-b","445","-b","444","-b","443","-b","442","-b","441","-b", "440","-b","439","-b","438","-b","437","-b","436","-b","435","-b","434", "-b","433","-b","432","-b","431","-b","430","-b","429","-b","428","-b", "427","-b","426","-b","425","-b","424","-b","423","-b","422","-b","421", "-b","420","-b","419","-b","418","-b","417","-b","416","-b","415","-b", "414","-b","413","-b","412","-b","411","-b","410","-b","409","-b","408", "-b","407","-b","406","-b","405","-b","404","-b","403","-b","402","-b", "401","-b","400","-b","399","-b","398","-b","397","-b","396","-b","395", "-b","394","-b","393","-b","392","-b","391","-b","390","-b","389","-b", "388","-b","387","-b","386","-b","385","-b","384","-b","383","-b","382", "-b","381","-b","380","-b","379","-b","378","-b","377","-b","376","-b", "375","-b","374","-b","373","-b","372","-b","371","-b","370","-b","369", "-b","368","-b","367","-b","366","-b","365","-b","364","-b","363","-b", "362","-b","361","-b","360","-b","359","-b","358","-b","357","-b","356", "-b","355","-b","354","-b","353","-b","352","-b","351","-b","350","-b", "349","-b","348","-b","347","-b","346","-b","345","-b","344","-b","343", "-b","342","-b","341","-b","340","-b","339","-b","338","-b","337","-b", "336","-b","335","-b","334","-b","333","-b","332","-b","331","-b","330", "-b","329","-b","328","-b","327","-b","326","-b","325","-b","324","-b", "323","-b","322","-b","321","-b","320","-b","319","-b","318","-b","317", "-b","316","-b","315","-b","314","-b","313","-b","312","-b","311","-b", "310","-b","309","-b","308","-b","307","-b","306","-b","305","-b","304", "-b","303","-b","302","-b","301","-b","300","-b","299","-b","298","-b", "297","-b","296","-b","295","-b","294","-b","293","-b","292","-b","291", "-b","290","-b","289","-b","288","-b","287","-b","286","-b","285","-b", "284","-b","283","-b","282","-b","281","-b","280","-b","279","-b","278", "-b","277","-b","276","-b","275","-b","274","-b","273","-b","272","-b", "271","-b","270","-b","269","-b","268","-b","267","-b","266","-b","265", "-b","264","-b","263","-b","262","-b","261","-b","260","-b","259","-b", "258","-b","257","-b","256","-b","255","-b","254","-b","253","-b","252", "-b","251","-b","250","-b","249","-b","248","-b","247","-b","246","-b", "245","-b","244","-b","243","-b","242","-b","241","-b","240","-b","239", "-b","238","-b","237","-b","236","-b","235","-b","234","-b","233","-b", "232","-b","231","-b","230","-b","229","-b","228","-b","227","-b","226", "-b","225","-b","224","-b","223","-b","222","-b","221","-b","220","-b", "219","-b","218","-b","217","-b","216","-b","215","-b","214","-b","213", "-b","212","-b","211","-b","210","-b","209","-b","208","-b","207","-b", "206","-b","205","-b","204","-b","203","-b","202","-b","201","-b","200", "-b","199","-b","198","-b","197","-b","196","-b","195","-b","194","-b", "193","-b","192","-b","191","-b","190","-b","189","-b","188","-b","187", "-b","186","-b","185","-b","184","-b","183","-b","182","-b","181","-b", "180","-b","179","-b","178","-b","177","-b","176","-b","175","-b","174", 
"-b","173","-b","172","-b","171","-b","170","-b","169","-b","168","-b", "167","-b","166","-b","165","-b","164","-b","163","-b","162","-b","161", "-b","160","-b","159","-b","158","-b","157","-b","156","-b","155","-b", "154","-b","153","-b","152","-b","151","-b","150","-b","149","-b","148", "-b","147","-b","146","-b","145","-b","144","-b","143","-b","142","-b", "141","-b","140","-b","139","-b","138","-b","137","-b","136","-b","135", "-b","134","-b","133","-b","132","-b","131","-b","130","-b","129","-b", "128","-b","127","-b","126","-b","125","-b","124","-b","123","-b","122", "-b","121","-b","120","-b","119","-b","118","-b","117","-b","116","-b", "115","-b","114","-b","113","-b","112","-b","111","-b","110","-b","109", "-b","108","-b","107","-b","106","-b","105","-b","104","-b","103","-b", "102","-b","101","-b","100","-b","99","-b","98","-b","97","-b","96","-b", "95","-b","94","-b","93","-b","92","-b","91","-b","90","-b","89","-b","88", "-b","87","-b","86","-b","85","-b","84","-b","83","-b","82","-b","81","-b", "80","-b","79","-b","78","-b","77","-b","76","-b","75","-b","74","-b","73", "-b","72","-b","71"], [use_stdio,stderr_to_stdout,{write_data,[[],"\n"]}]] INFO REPORT <0.8116.0> 2011-01-03 13:24:19 =============================================================================== vbucketmigrator<0.8116.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.8116.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.8116.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.8116.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.8116.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.8116.0>: Authenticated towards: {Sock 10.2.1.100:11210} INFO REPORT <0.105.0> 2011-01-03 13:24:19 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_639 for 1.00 secs memcached<0.105.0>: Backfilling token for eq_tapq:anon_638 went invalid. Stopping backfill. memcached<0.105.0>: Backfilling token for eq_tapq:anon_638 went invalid. Stopping backfill. memcached<0.105.0>: Backfilling token for eq_tapq:anon_638 went invalid. Stopping backfill. memcached<0.105.0>: Backfilling token for eq_tapq:anon_638 went invalid. Stopping backfill. memcached<0.105.0>: Backfilling token for eq_tapq:anon_638 went invalid. Stopping backfill. memcached<0.105.0>: Backfilling token for eq_tapq:anon_638 went invalid. Stopping backfill. memcached<0.105.0>: Backfilling token for eq_tapq:anon_638 went invalid. Stopping backfill. memcached<0.105.0>: Backfilling token for eq_tapq:anon_638 went invalid. Stopping backfill. memcached<0.105.0>: Backfilling token for eq_tapq:anon_638 went invalid. Stopping backfill. 
ERROR REPORT <0.105.0> 2011-01-03 13:24:19
===============================================================================
ns_1@10.2.1.100:ns_port_server:130: Dropped 503 log lines from memcached

INFO REPORT <0.110.0> 2011-01-03 13:24:21
===============================================================================
ns_log: logging ns_orchestrator:4:Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'], EjectNodes = []

INFO REPORT <0.65.0> 2011-01-03 13:24:21
===============================================================================
config change: {node,'ns_1@10.2.1.102',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:24:21
===============================================================================
config change: {node,'ns_1@10.2.1.101',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:24:21
===============================================================================
config change: {node,'ns_1@10.2.1.100',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:24:21
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:24:21
===============================================================================
Pushing config

INFO REPORT <0.85.0> 2011-01-03 13:24:21
===============================================================================
Pushing config done

INFO REPORT <0.65.0> 2011-01-03 13:24:21
===============================================================================
config change: rebalance_status -> running

INFO REPORT <0.65.0> 2011-01-03 13:24:21
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:24:21
===============================================================================
Pushing config

INFO REPORT <0.85.0> 2011-01-03 13:24:21
===============================================================================
Pushing config done

INFO REPORT <0.105.0> 2011-01-03 13:24:21
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_639 for 1.00 secs

INFO REPORT <0.8132.0> 2011-01-03 13:24:22
===============================================================================
vbucketmigrator<0.8132.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8132.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8132.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8132.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8132.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8132.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8132.0>: Starting to move bucket 635

INFO REPORT <0.8132.0> 2011-01-03 13:24:22
===============================================================================
vbucketmigrator<0.8132.0>: Bucket 635 moved to the next server
vbucketmigrator<0.8132.0>: Validate bucket states
vbucketmigrator<0.8132.0>: 635 ok

INFO REPORT <11993.5526.0> 2011-01-03 13:24:23
===============================================================================
vbucketmigrator<0.5526.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5526.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5526.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5526.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5526.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5526.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5526.0>: Starting to move bucket 71

INFO REPORT <11993.5526.0> 2011-01-03 13:24:23
===============================================================================
vbucketmigrator<0.5526.0>: Bucket 71 moved to the next server
vbucketmigrator<0.5526.0>: Validate bucket states
vbucketmigrator<0.5526.0>: 71 ok

INFO REPORT <0.8151.0> 2011-01-03 13:24:23
===============================================================================
vbucketmigrator<0.8151.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8151.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8151.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8151.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8151.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8151.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8151.0>: Starting to move bucket 636

INFO REPORT <0.105.0> 2011-01-03 13:24:23
===============================================================================
memcached<0.105.0>: Backfilling token for eq_tapq:anon_639 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_639 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_639 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_639 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_639 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_639 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_639 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_639 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_639 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_639 went invalid. Stopping backfill.
ERROR REPORT <0.105.0> 2011-01-03 13:24:23
===============================================================================
ns_1@10.2.1.100:ns_port_server:130: Dropped 379 log lines from memcached

INFO REPORT <11993.5542.0> 2011-01-03 13:24:24
===============================================================================
vbucketmigrator<0.5542.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5542.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5542.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5542.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5542.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5542.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5542.0>: Starting to move bucket 72

INFO REPORT <0.85.0> 2011-01-03 13:24:25
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.8151.0> 2011-01-03 13:24:28
===============================================================================
vbucketmigrator<0.8151.0>: Bucket 636 moved to the next server
vbucketmigrator<0.8151.0>: Validate bucket states
vbucketmigrator<0.8151.0>: 636 ok

INFO REPORT <0.110.0> 2011-01-03 13:24:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48,
  {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []},
  {{[['ns_1@10.2.1.100'| 0.04166666666666663]],
    [['ns_1@10.2.1.101'| 0.02020202020202022]],
    [['ns_1@10.2.1.102'| 0.027210884353741527]],
    [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.8164.0> 2011-01-03 13:24:29
===============================================================================
vbucketmigrator<0.8164.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8164.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8164.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8164.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8164.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8164.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8164.0>: Starting to move bucket 637

INFO REPORT <0.8164.0> 2011-01-03 13:24:33
===============================================================================
vbucketmigrator<0.8164.0>: Bucket 637 moved to the next server
vbucketmigrator<0.8164.0>: Validate bucket states
vbucketmigrator<0.8164.0>: 637 ok

INFO REPORT <0.8175.0> 2011-01-03 13:24:34
===============================================================================
vbucketmigrator<0.8175.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8175.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8175.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8175.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8175.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8175.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8175.0>: Starting to move bucket 638

INFO REPORT <11993.5542.0> 2011-01-03 13:24:36
===============================================================================
vbucketmigrator<0.5542.0>: Bucket 72 moved to the next server
vbucketmigrator<0.5542.0>: Validate bucket states
vbucketmigrator<0.5542.0>: 72 ok

INFO REPORT <11993.5568.0> 2011-01-03 13:24:37
===============================================================================
vbucketmigrator<0.5568.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5568.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5568.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5568.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5568.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5568.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5568.0>: Starting to move bucket 73

INFO REPORT <0.8175.0> 2011-01-03 13:24:38
===============================================================================
vbucketmigrator<0.8175.0>: Bucket 638 moved to the next server
vbucketmigrator<0.8175.0>: Validate bucket states
vbucketmigrator<0.8175.0>: 638 ok

INFO REPORT <0.110.0> 2011-01-03 13:24:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48,
  {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []},
  {{[['ns_1@10.2.1.100'| 0.08333333333333337]],
    [['ns_1@10.2.1.101'| 0.030303030303030276]],
    [['ns_1@10.2.1.102'| 0.04761904761904767]],
    [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.8193.0> 2011-01-03 13:24:39
===============================================================================
vbucketmigrator<0.8193.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8193.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8193.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8193.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8193.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8193.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8193.0>: Starting to move bucket 639

INFO REPORT <0.8193.0> 2011-01-03 13:24:43
===============================================================================
vbucketmigrator<0.8193.0>: Bucket 639 moved to the next server
vbucketmigrator<0.8193.0>: Validate bucket states
vbucketmigrator<0.8193.0>: 639 ok

INFO REPORT <0.8210.0> 2011-01-03 13:24:44
===============================================================================
vbucketmigrator<0.8210.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8210.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8210.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8210.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8210.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8210.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8210.0>: Starting to move bucket 640

INFO REPORT <0.7270.0> 2011-01-03 13:24:46
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.7260.0> 2011-01-03 13:24:47
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.7251.0> 2011-01-03 13:24:47
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <11993.5568.0> 2011-01-03 13:24:47
===============================================================================
vbucketmigrator<0.5568.0>: Bucket 73 moved to the next server
vbucketmigrator<0.5568.0>: Validate bucket states
vbucketmigrator<0.5568.0>: 73 ok

INFO REPORT <0.110.0> 2011-01-03 13:24:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48,
  {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []},
  {{[['ns_1@10.2.1.100'| 0.125]],
    [['ns_1@10.2.1.101'| 0.030303030303030276]],
    [['ns_1@10.2.1.102'| 0.061224489795918324]],
    [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.7162.0> 2011-01-03 13:24:48
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <11993.5597.0> 2011-01-03 13:24:48
===============================================================================
vbucketmigrator<0.5597.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5597.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5597.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5597.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5597.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5597.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5597.0>: Starting to move bucket 74

INFO REPORT <0.8210.0> 2011-01-03 13:24:48
===============================================================================
vbucketmigrator<0.8210.0>: Bucket 640 moved to the next server
vbucketmigrator<0.8210.0>: Validate bucket states
vbucketmigrator<0.8210.0>: 640 ok

INFO REPORT <0.7227.0> 2011-01-03 13:24:49
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.7241.0> 2011-01-03 13:24:49
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.8231.0> 2011-01-03 13:24:49
===============================================================================
vbucketmigrator<0.8231.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8231.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8231.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8231.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8231.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8231.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8231.0>: Starting to move bucket 641

INFO REPORT <0.8231.0> 2011-01-03 13:24:54
===============================================================================
vbucketmigrator<0.8231.0>: Bucket 641 moved to the next server
vbucketmigrator<0.8231.0>: Validate bucket states
vbucketmigrator<0.8231.0>: 641 ok

INFO REPORT <0.8253.0> 2011-01-03 13:24:55
===============================================================================
vbucketmigrator<0.8253.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8253.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8253.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8253.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8253.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8253.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8253.0>: Starting to move bucket 642

INFO REPORT <0.110.0> 2011-01-03 13:24:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48,
  {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []},
  {{[['ns_1@10.2.1.100'| 0.14583333333333337]],
    [['ns_1@10.2.1.101'| 0.04040404040404044]],
    [['ns_1@10.2.1.102'| 0.07482993197278909]],
    [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.8253.0> 2011-01-03 13:24:58
===============================================================================
vbucketmigrator<0.8253.0>: Bucket 642 moved to the next server
vbucketmigrator<0.8253.0>: Validate bucket states
vbucketmigrator<0.8253.0>: 642 ok

INFO REPORT <0.8269.0> 2011-01-03 13:24:59
===============================================================================
vbucketmigrator<0.8269.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8269.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8269.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8269.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8269.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8269.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8269.0>: Starting to move bucket 643

INFO REPORT <11993.5597.0> 2011-01-03 13:25:00
===============================================================================
vbucketmigrator<0.5597.0>: Bucket 74 moved to the next server
vbucketmigrator<0.5597.0>: Validate bucket states
vbucketmigrator<0.5597.0>: 74 ok

INFO REPORT <11993.5630.0> 2011-01-03 13:25:01
===============================================================================
vbucketmigrator<0.5630.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5630.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5630.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5630.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5630.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5630.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5630.0>: Starting to move bucket 75

INFO REPORT <0.8269.0> 2011-01-03 13:25:04
===============================================================================
vbucketmigrator<0.8269.0>: Bucket 643 moved to the next server
vbucketmigrator<0.8269.0>: Validate bucket states
vbucketmigrator<0.8269.0>: 643 ok

INFO REPORT <0.8281.0> 2011-01-03 13:25:06
===============================================================================
vbucketmigrator<0.8281.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8281.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8281.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8281.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8281.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8281.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8281.0>: Starting to move bucket 644

INFO REPORT <0.93.0> 2011-01-03 13:25:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,89907,405401}},
   {active_buckets,["default"]},
   {memory, [{total,22209112}, {processes,14306236}, {processes_used,14288252},
             {system,7902876}, {atom,560301}, {atom_used,557531}, {binary,173424},
             {code,4570913}, {ets,1177788}]},
   {cluster_compatibility_version,1},
   {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"},
{inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1919}, {memory_data,{4284698624,4137054208,{<0.299.0>,3328596}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,113291264}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1910394,0}}, {context_switches,{613825,0}}, {garbage_collection,{113438,1175862474,0}}, {io,{{input,53410832},{output,27676966}}}, {reductions,{525367191,2200862}}, {run_queue,0}, {runtime,{26317,187}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,89908,388401}}, {active_buckets,["default"]}, {memory, [{total,18349504}, {processes,10336156}, {processes_used,10327476}, {system,8013348}, {atom,559813}, {atom_used,556363}, {binary,349744}, {code,4551541}, {ets,1137260}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1850}, {memory_data,{4284698624,4237815808,{<11993.387.0>,2357452}}}, {disk_data, [{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,53641216}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1840895,0}}, {context_switches,{318298,0}}, {garbage_collection,{79603,690148123,0}}, {io,{{input,42560651},{output,20440204}}}, {reductions,{201027887,2028843}}, {run_queue,0}, {runtime,{15366,110}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,89908,154400}}, {active_buckets,["default"]}, {memory, [{total,13688872}, {processes,6303804}, {processes_used,6290380}, {system,7385068}, {atom,541077}, {atom_used,528744}, {binary,392232}, {code,4280811}, {ets,759612}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,838}, {memory_data,{4284698624,2142031872,{<10870.218.0>,671580}}}, {disk_data, [{"C:\\",49423972,39},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,2052374528}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{830315,0}}, {context_switches,{89809,0}}, {garbage_collection,{23204,111637811,0}}, {io,{{input,11315667},{output,7330508}}}, {reductions,{46036412,573896}}, {run_queue,0}, {runtime,{4336,78}}]}]}] INFO REPORT <0.110.0> 2011-01-03 13:25:08 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.1875]], [['ns_1@10.2.1.101'| 0.0505050505050505]], [['ns_1@10.2.1.102'| 0.09523809523809523]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.8281.0> 2011-01-03 13:25:10 =============================================================================== vbucketmigrator<0.8281.0>: Bucket 644 moved to the next server vbucketmigrator<0.8281.0>: Validate bucket states vbucketmigrator<0.8281.0>: 644 ok INFO REPORT <0.8293.0> 2011-01-03 13:25:11 
===============================================================================
vbucketmigrator<0.8293.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8293.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8293.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8293.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8293.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8293.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8293.0>: Starting to move bucket 645

INFO REPORT <11993.5630.0> 2011-01-03 13:25:12
===============================================================================
vbucketmigrator<0.5630.0>: Bucket 75 moved to the next server
vbucketmigrator<0.5630.0>: Validate bucket states
vbucketmigrator<0.5630.0>: 75 ok

INFO REPORT <11993.5669.0> 2011-01-03 13:25:13
===============================================================================
vbucketmigrator<0.5669.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5669.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5669.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5669.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5669.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5669.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5669.0>: Starting to move bucket 76

INFO REPORT <0.8293.0> 2011-01-03 13:25:15
===============================================================================
vbucketmigrator<0.8293.0>: Bucket 645 moved to the next server
vbucketmigrator<0.8293.0>: Validate bucket states
vbucketmigrator<0.8293.0>: 645 ok

INFO REPORT <0.8308.0> 2011-01-03 13:25:16
===============================================================================
vbucketmigrator<0.8308.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8308.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8308.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8308.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8308.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8308.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8308.0>: Starting to move bucket 646

INFO REPORT <0.110.0> 2011-01-03 13:25:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48,
  {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []},
  {{[['ns_1@10.2.1.100'| 0.22916666666666663]],
    [['ns_1@10.2.1.101'| 0.06060606060606055]],
    [['ns_1@10.2.1.102'| 0.11564625850340138]],
    [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.85.0> 2011-01-03 13:25:21
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.8308.0> 2011-01-03 13:25:21
===============================================================================
vbucketmigrator<0.8308.0>: Bucket 646 moved to the next server
vbucketmigrator<0.8308.0>: Validate bucket states
vbucketmigrator<0.8308.0>: 646 ok

INFO REPORT <0.8322.0> 2011-01-03 13:25:22
===============================================================================
vbucketmigrator<0.8322.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8322.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8322.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8322.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8322.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8322.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8322.0>: Starting to move bucket 647

INFO REPORT <11993.5669.0> 2011-01-03 13:25:25
===============================================================================
vbucketmigrator<0.5669.0>: Bucket 76 moved to the next server
vbucketmigrator<0.5669.0>: Validate bucket states
vbucketmigrator<0.5669.0>: 76 ok

INFO REPORT <0.8322.0> 2011-01-03 13:25:26
===============================================================================
vbucketmigrator<0.8322.0>: Bucket 647 moved to the next server
vbucketmigrator<0.8322.0>: Validate bucket states
vbucketmigrator<0.8322.0>: 647 ok

INFO REPORT <11993.5702.0> 2011-01-03 13:25:26
===============================================================================
vbucketmigrator<0.5702.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5702.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5702.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5702.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5702.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5702.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5702.0>: Starting to move bucket 77

INFO REPORT <0.8335.0> 2011-01-03 13:25:27
===============================================================================
vbucketmigrator<0.8335.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8335.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8335.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8335.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8335.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8335.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8335.0>: Starting to move bucket 648

INFO REPORT <0.110.0> 2011-01-03 13:25:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48,
  {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []},
  {{[['ns_1@10.2.1.100'| 0.27083333333333337]],
    [['ns_1@10.2.1.101'| 0.07070707070707072]],
    [['ns_1@10.2.1.102'| 0.13605442176870752]],
    [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.8335.0> 2011-01-03 13:25:31
===============================================================================
vbucketmigrator<0.8335.0>: Bucket 648 moved to the next server
vbucketmigrator<0.8335.0>: Validate bucket states
vbucketmigrator<0.8335.0>: 648 ok

INFO REPORT <0.8353.0> 2011-01-03 13:25:33
===============================================================================
vbucketmigrator<0.8353.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8353.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8353.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8353.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8353.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8353.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8353.0>: Starting to move bucket 649

INFO REPORT <0.8353.0> 2011-01-03 13:25:36
===============================================================================
vbucketmigrator<0.8353.0>: Bucket 649 moved to the next server
vbucketmigrator<0.8353.0>: Validate bucket states
vbucketmigrator<0.8353.0>: 649 ok

INFO REPORT <0.8361.0> 2011-01-03 13:25:37
===============================================================================
vbucketmigrator<0.8361.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8361.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8361.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8361.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8361.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8361.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8361.0>: Starting to move bucket 650

INFO REPORT <11993.5702.0> 2011-01-03 13:25:38
===============================================================================
vbucketmigrator<0.5702.0>: Bucket 77 moved to the next server
vbucketmigrator<0.5702.0>: Validate bucket states
vbucketmigrator<0.5702.0>: 77 ok

INFO REPORT <0.110.0> 2011-01-03 13:25:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48,
  {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []},
  {{[['ns_1@10.2.1.100'| 0.33333333333333337]],
    [['ns_1@10.2.1.101'| 0.07070707070707072]],
    [['ns_1@10.2.1.102'| 0.15646258503401356]],
    [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.5727.0> 2011-01-03 13:25:40
===============================================================================
vbucketmigrator<0.5727.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5727.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5727.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5727.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5727.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5727.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5727.0>: Starting to move bucket 78

INFO REPORT <0.8361.0> 2011-01-03 13:25:41
===============================================================================
vbucketmigrator<0.8361.0>: Bucket 650 moved to the next server
vbucketmigrator<0.8361.0>: Validate bucket states
vbucketmigrator<0.8361.0>: 650 ok

INFO REPORT <0.8375.0> 2011-01-03 13:25:43
===============================================================================
vbucketmigrator<0.8375.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8375.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8375.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8375.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8375.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8375.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8375.0>: Starting to move bucket 651

INFO REPORT <0.8375.0> 2011-01-03 13:25:47
===============================================================================
vbucketmigrator<0.8375.0>: Bucket 651 moved to the next server
vbucketmigrator<0.8375.0>: Validate bucket states
vbucketmigrator<0.8375.0>: 651 ok

INFO REPORT <0.110.0> 2011-01-03 13:25:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48,
  {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []},
  {{[['ns_1@10.2.1.100'| 0.35416666666666663]],
    [['ns_1@10.2.1.101'| 0.08080808080808077]],
    [['ns_1@10.2.1.102'| 0.17006802721088432]],
    [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.8387.0> 2011-01-03 13:25:48
===============================================================================
vbucketmigrator<0.8387.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8387.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8387.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8387.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8387.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8387.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8387.0>: Starting to move bucket 652

INFO REPORT <0.259.0> 2011-01-03 13:25:51
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 660
auth_errors 0
bucket_conns 2
bytes_read 5781423097
bytes_written 154620488
cas_badval 0
cas_hits 0
cas_misses 0
cmd_flush 0
cmd_get 0
cmd_set 3677966
conn_yields 238
connection_structures 235
curr_connections 24
curr_items 2440646
curr_items_tot 4099756
daemon_connections 10
decr_hits 0
decr_misses 0
delete_hits 0
delete_misses 0
ep_bg_fetched 0
ep_commit_num 3980
ep_commit_time 0
ep_commit_time_total 1274
ep_data_age 253
ep_data_age_highwat 495
ep_db_cleaner_status complete
ep_db_strategy multiMTDB
ep_dbinit 81
ep_dbname c:/Program Files/Membase/Server/data/ns_1/default
ep_dbshards 4
ep_expired 0
ep_flush_duration 233
ep_flush_duration_highwat 297
ep_flush_duration_total 1518
ep_flush_preempts 0
ep_flusher_state running
ep_flusher_todo 59294
ep_io_num_read 595251
ep_io_num_write 3861399
ep_io_read_bytes 808547944
ep_io_write_bytes 4624259821
ep_item_begin_failed 0
ep_item_commit_failed 0
ep_item_flush_expired 0
ep_item_flush_failed 0
ep_kv_size 2360804044
ep_max_data_size 3426746368
ep_max_txn_size 1000
ep_mem_high_wat 2570059776
ep_mem_low_wat 2056047820
ep_min_data_age 0
ep_num_active_non_resident 704843
ep_num_eject_failures 1689334
ep_num_eject_replicas 871116
ep_num_expiry_pager_runs 0
ep_num_non_resident 1937811
ep_num_not_my_vbuckets 561512
ep_num_pager_runs 6
ep_num_value_ejects 1939180
ep_oom_errors 0
ep_overhead 47028396
ep_pending_ops 0
ep_pending_ops_max 0
ep_pending_ops_max_duration 0
ep_pending_ops_total 0
ep_queue_age_cap 900
ep_queue_size 182697
ep_storage_age 252
ep_storage_age_highwat 492
ep_storage_type featured
ep_store_max_concurrency 10
ep_store_max_readers 9
ep_store_max_readwrite 1
ep_tap_bg_fetch_requeued 0
ep_tap_bg_fetched 588995
ep_tap_keepalive 0
ep_tmp_oom_errors 0
ep_too_old 0
ep_too_young 0
ep_total_cache_size 5011079772
ep_total_del_items 0
ep_total_enqueued 4103492
ep_total_new_items 3858287
ep_total_persisted 3861399
ep_vbucket_del 512
ep_vbucket_del_avg_walltime 118096
ep_vbucket_del_fail 0
ep_vbucket_del_max_walltime 140400
ep_vbucket_del_total_walltime 60465600
ep_version 1.6.2
ep_warmed_up 0
ep_warmup true
ep_warmup_dups 0
ep_warmup_oom 0
ep_warmup_thread complete
ep_warmup_time 31200
get_hits 0
get_misses 0
incr_hits 0
incr_misses 0
libevent 2.0.7-rc
limit_maxbytes 67108864
mem_used 2407832440
pid 2160
pointer_size 64
rejected_conns 0
tap_connect_received 657
tap_mutation_received 1576969
tap_mutation_sent 3200762
tap_opaque_received 1468
tap_opaque_sent 2070
tap_vbucket_set_sent 1306
threads 4
time 1294089951
total_connections 1304
uptime 1965
version 1.4.4_304_g7d5a132

INFO REPORT <11993.5727.0> 2011-01-03 13:25:52
===============================================================================
vbucketmigrator<0.5727.0>: Bucket 78 moved to the next server
vbucketmigrator<0.5727.0>: Validate bucket states
vbucketmigrator<0.5727.0>: 78 ok

INFO REPORT <0.8387.0> 2011-01-03 13:25:53
===============================================================================
vbucketmigrator<0.8387.0>: Bucket 652 moved to the next server
vbucketmigrator<0.8387.0>: Validate bucket states
vbucketmigrator<0.8387.0>: 652 ok

INFO REPORT <0.8404.0> 2011-01-03 13:25:54
===============================================================================
vbucketmigrator<0.8404.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8404.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8404.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8404.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8404.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8404.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8404.0>: Starting to move bucket 653

INFO REPORT <11993.5757.0> 2011-01-03 13:25:54
===============================================================================
vbucketmigrator<0.5757.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5757.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5757.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5757.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5757.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5757.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5757.0>: Starting to move bucket 79

INFO REPORT <0.110.0> 2011-01-03 13:25:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48,
  {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []},
  {{[['ns_1@10.2.1.100'| 0.375]],
    [['ns_1@10.2.1.101'| 0.09090909090909094]],
    [['ns_1@10.2.1.102'| 0.18367346938775508]],
    [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.8404.0> 2011-01-03 13:25:58
===============================================================================
vbucketmigrator<0.8404.0>: Bucket 653 moved to the next server
vbucketmigrator<0.8404.0>: Validate bucket states
vbucketmigrator<0.8404.0>: 653 ok

INFO REPORT <0.8418.0> 2011-01-03 13:25:59
===============================================================================
vbucketmigrator<0.8418.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8418.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8418.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8418.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8418.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8418.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8418.0>: Starting to move bucket 654

INFO REPORT <0.8418.0> 2011-01-03 13:26:03
===============================================================================
vbucketmigrator<0.8418.0>: Bucket 654 moved to the next server
vbucketmigrator<0.8418.0>: Validate bucket states
vbucketmigrator<0.8418.0>: 654 ok

INFO REPORT <0.8428.0> 2011-01-03 13:26:04
===============================================================================
vbucketmigrator<0.8428.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8428.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8428.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8428.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8428.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8428.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8428.0>: Starting to move bucket 655

INFO REPORT <0.8428.0> 2011-01-03 13:26:07
===============================================================================
vbucketmigrator<0.8428.0>: Bucket 655 moved to the next server
vbucketmigrator<0.8428.0>: Validate bucket states
vbucketmigrator<0.8428.0>: 655 ok

INFO REPORT <11993.5757.0> 2011-01-03 13:26:07
===============================================================================
vbucketmigrator<0.5757.0>: Bucket 79 moved to the next server
vbucketmigrator<0.5757.0>: Validate bucket states
vbucketmigrator<0.5757.0>: 79 ok

INFO REPORT <0.8436.0> 2011-01-03 13:26:08
===============================================================================
vbucketmigrator<0.8436.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8436.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8436.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8436.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8436.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8436.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8436.0>: Starting to move bucket 656

INFO REPORT <0.93.0> 2011-01-03 13:26:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,89967,403401}},
   {active_buckets,["default"]},
   {memory, [{total,17441224}, {processes,9536684}, {processes_used,9518196},
             {system,7904540}, {atom,560301}, {atom_used,557531}, {binary,176112},
             {code,4570913}, {ets,1176724}]},
   {cluster_compatibility_version,1},
   {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"},
              {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1979},
   {memory_data,{4284698624,4174278656,{<0.7988.0>,5385568}}},
   {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]},
   {replication,[{"default",0.0}]},
   {system_memory_data, [{total_memory,4284698624}, {free_memory,101064704}, {system_total_memory,4284698624}]},
   {statistics, [{wall_clock,{1970392,0}}, {context_switches,{627364,0}},
                 {garbage_collection,{115746,1205781983,0}},
                 {io,{{input,54434025},{output,28194996}}},
                 {reductions,{533632293,2108209}}, {run_queue,0}, {runtime,{26769,140}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,89968,386401}},
   {active_buckets,["default"]},
   {memory, [{total,17564512}, {processes,9486260}, {processes_used,9477580},
             {system,8078252}, {atom,559813}, {atom_used,556363}, {binary,379304},
             {code,4551541}, {ets,1173036}]},
   {cluster_compatibility_version,1},
   {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"},
              {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1910},
   {memory_data,{4284698624,4238913536,{<11993.387.0>,2357452}}},
   {disk_data, [{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]},
   {replication,[{"default",0.0}]},
   {system_memory_data, [{total_memory,4284698624}, {free_memory,64897024}, {system_total_memory,4284698624}]},
   {statistics, [{wall_clock,{1900893,0}}, {context_switches,{329305,0}},
                 {garbage_collection,{82017,713355540,0}},
                 {io,{{input,44327633},{output,21909983}}},
                 {reductions,{208656884,2031711}}, {run_queue,0}, {runtime,{15943,141}}]}]},
 {'ns_1@10.2.1.102',
  [{last_heard,{1294,89968,152401}},
   {active_buckets,["default"]},
   {memory, [{total,13302416}, {processes,5995204}, {processes_used,5981780},
             {system,7307212}, {atom,541077}, {atom_used,528744}, {binary,315232},
             {code,4280811}, {ets,758500}]},
   {cluster_compatibility_version,1},
   {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"},
              {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,898},
   {memory_data,{4284698624,2247819264,{<10870.307.0>,1271680}}},
   {disk_data, [{"C:\\",49423972,39},{"D:\\",52797620,0},{"G:\\",34724465,17}]},
   {replication,[{"default",1.0}]},
   {system_memory_data, [{total_memory,4284698624}, {free_memory,1942175744}, {system_total_memory,4284698624}]},
   {statistics, [{wall_clock,{890313,0}}, {context_switches,{95347,0}},
                 {garbage_collection,{24668,119905039,0}},
                 {io,{{input,11594439},{output,7598655}}},
                 {reductions,{49525684,574558}}, {run_queue,0}, {runtime,{4976,140}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:26:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48,
  {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []},
  {{[['ns_1@10.2.1.100'| 0.45833333333333337]],
    [['ns_1@10.2.1.101'| 0.09090909090909094]],
    [['ns_1@10.2.1.102'| 0.2108843537414966]],
    [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.5788.0> 2011-01-03 13:26:09
===============================================================================
vbucketmigrator<0.5788.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5788.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5788.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5788.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5788.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5788.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5788.0>: Starting to move bucket 80

INFO REPORT <0.85.0> 2011-01-03 13:26:12
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.8436.0> 2011-01-03 13:26:12
===============================================================================
vbucketmigrator<0.8436.0>: Bucket 656 moved to the next server
vbucketmigrator<0.8436.0>: Validate bucket states
vbucketmigrator<0.8436.0>: 656 ok

INFO REPORT <0.8470.0> 2011-01-03 13:26:13
===============================================================================
vbucketmigrator<0.8470.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8470.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8470.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8470.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8470.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8470.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8470.0>: Starting to move bucket 657

INFO REPORT <0.8470.0> 2011-01-03 13:26:17
===============================================================================
vbucketmigrator<0.8470.0>: Bucket 657 moved to the next server
vbucketmigrator<0.8470.0>: Validate bucket states
vbucketmigrator<0.8470.0>: 657 ok

INFO REPORT <0.8479.0> 2011-01-03 13:26:18
===============================================================================
vbucketmigrator<0.8479.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8479.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8479.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8479.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8479.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8479.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8479.0>: Starting to move bucket 658

INFO REPORT <0.110.0> 2011-01-03 13:26:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48,
  {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []},
  {{[['ns_1@10.2.1.100'| 0.47916666666666663]],
    [['ns_1@10.2.1.101'| 0.101010101010101]],
    [['ns_1@10.2.1.102'| 0.22448979591836737]],
    [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.5788.0> 2011-01-03 13:26:20
===============================================================================
vbucketmigrator<0.5788.0>: Bucket 80 moved to the next server
vbucketmigrator<0.5788.0>: Validate bucket states
vbucketmigrator<0.5788.0>: 80 ok

INFO REPORT <11993.5823.0> 2011-01-03 13:26:21
===============================================================================
vbucketmigrator<0.5823.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5823.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5823.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5823.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5823.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5823.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5823.0>: Starting to move bucket 81

INFO REPORT <0.8479.0> 2011-01-03 13:26:22
===============================================================================
vbucketmigrator<0.8479.0>: Bucket 658 moved to the next server
vbucketmigrator<0.8479.0>: Validate bucket states
vbucketmigrator<0.8479.0>: 658 ok

INFO REPORT <0.85.0> 2011-01-03 13:26:22
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.8492.0> 2011-01-03 13:26:23
===============================================================================
vbucketmigrator<0.8492.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8492.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8492.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8492.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8492.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8492.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8492.0>: Starting to move bucket 659

INFO REPORT <0.8492.0> 2011-01-03 13:26:28
===============================================================================
vbucketmigrator<0.8492.0>: Bucket 659 moved to the next server
vbucketmigrator<0.8492.0>: Validate bucket states
vbucketmigrator<0.8492.0>: 659 ok

INFO REPORT <0.110.0> 2011-01-03 13:26:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48,
  {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []},
  {{[['ns_1@10.2.1.100'| 0.5208333333333333]],
    [['ns_1@10.2.1.101'| 0.11111111111111116]],
    [['ns_1@10.2.1.102'| 0.24489795918367352]],
    [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.5823.0> 2011-01-03 13:26:28
===============================================================================
vbucketmigrator<0.5823.0>: Bucket 81 moved to the next server
vbucketmigrator<0.5823.0>: Validate bucket states
vbucketmigrator<0.5823.0>: 81 ok

INFO REPORT <0.8505.0> 2011-01-03 13:26:29
===============================================================================
vbucketmigrator<0.8505.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8505.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8505.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8505.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8505.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8505.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8505.0>: Starting to move bucket 660

INFO REPORT <11993.5841.0> 2011-01-03 13:26:29
===============================================================================
vbucketmigrator<0.5841.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5841.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5841.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5841.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5841.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5841.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5841.0>: Starting to move bucket 82

INFO REPORT <0.8505.0> 2011-01-03 13:26:33
===============================================================================
vbucketmigrator<0.8505.0>: Bucket 660 moved to the next server
vbucketmigrator<0.8505.0>: Validate bucket states
vbucketmigrator<0.8505.0>: 660 ok

INFO REPORT <0.8525.0> 2011-01-03 13:26:34
===============================================================================
vbucketmigrator<0.8525.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8525.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8525.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8525.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8525.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8525.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8525.0>: Starting to move bucket 661

INFO REPORT <11993.5841.0> 2011-01-03 13:26:36
===============================================================================
vbucketmigrator<0.5841.0>: Bucket 82 moved to the next server
vbucketmigrator<0.5841.0>: Validate bucket states
vbucketmigrator<0.5841.0>: 82 ok

INFO REPORT <0.85.0> 2011-01-03 13:26:37
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <11993.5858.0> 2011-01-03 13:26:37
===============================================================================
vbucketmigrator<0.5858.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5858.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5858.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5858.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5858.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5858.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5858.0>: Starting to move bucket 83

INFO REPORT <0.110.0> 2011-01-03 13:26:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48,
  {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []},
  {{[['ns_1@10.2.1.100'| 0.5625]],
    [['ns_1@10.2.1.101'| 0.12121212121212122]],
    [['ns_1@10.2.1.102'| 0.26530612244897955]],
    [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.8525.0> 2011-01-03 13:26:39
===============================================================================
vbucketmigrator<0.8525.0>: Bucket 661 moved to the next server
vbucketmigrator<0.8525.0>: Validate bucket states
vbucketmigrator<0.8525.0>: 661 ok

INFO REPORT <0.8539.0> 2011-01-03 13:26:40
===============================================================================
vbucketmigrator<0.8539.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8539.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8539.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8539.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8539.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8539.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8539.0>: Starting to move bucket 662

INFO REPORT <11993.5858.0> 2011-01-03 13:26:43
===============================================================================
vbucketmigrator<0.5858.0>: Bucket 83 moved to the next server
vbucketmigrator<0.5858.0>: Validate bucket states
vbucketmigrator<0.5858.0>: 83 ok

INFO REPORT <11993.5872.0> 2011-01-03 13:26:44
===============================================================================
vbucketmigrator<0.5872.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5872.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5872.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5872.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5872.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5872.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5872.0>: Starting to move bucket 84

INFO REPORT <0.8539.0> 2011-01-03 13:26:45
===============================================================================
vbucketmigrator<0.8539.0>: Bucket 662 moved to the next server
vbucketmigrator<0.8539.0>: Validate bucket states
vbucketmigrator<0.8539.0>: 662 ok

INFO REPORT <0.8561.0> 2011-01-03 13:26:46
===============================================================================
vbucketmigrator<0.8561.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8561.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8561.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8561.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8561.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8561.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8561.0>: Starting to move bucket 663 INFO REPORT <0.85.0> 2011-01-03 13:26:47 =============================================================================== Pulling config from: 'ns_1@10.2.1.101' INFO REPORT <0.110.0> 2011-01-03 13:26:48 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.5833333333333333]], [['ns_1@10.2.1.101'| 0.14141414141414144]], [['ns_1@10.2.1.102'| 0.2857142857142857]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.8561.0> 2011-01-03 13:26:50 =============================================================================== vbucketmigrator<0.8561.0>: Bucket 663 moved to the next server vbucketmigrator<0.8561.0>: Validate bucket states vbucketmigrator<0.8561.0>: 663 ok INFO REPORT <0.8571.0> 2011-01-03 13:26:51 =============================================================================== vbucketmigrator<0.8571.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.8571.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.8571.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.8571.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.8571.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.8571.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.8571.0>: Starting to move bucket 664 INFO REPORT <11993.5872.0> 2011-01-03 13:26:51 =============================================================================== vbucketmigrator<0.5872.0>: Bucket 84 moved to the next server vbucketmigrator<0.5872.0>: Validate bucket states vbucketmigrator<0.5872.0>: 84 ok INFO REPORT <11993.5896.0> 2011-01-03 13:26:52 =============================================================================== vbucketmigrator<0.5896.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5896.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5896.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5896.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5896.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5896.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5896.0>: Starting to move bucket 85 INFO REPORT <0.8571.0> 2011-01-03 13:26:55 =============================================================================== vbucketmigrator<0.8571.0>: Bucket 664 moved to the next server vbucketmigrator<0.8571.0>: Validate bucket states vbucketmigrator<0.8571.0>: 664 ok INFO REPORT <0.8586.0> 2011-01-03 13:26:56 =============================================================================== vbucketmigrator<0.8586.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.8586.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.8586.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.8586.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.8586.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.8586.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.8586.0>: Starting to move bucket 665 INFO REPORT <0.110.0> 2011-01-03 13:26:58 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state 
rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.625]], [['ns_1@10.2.1.101'| 0.1515151515151515]], [['ns_1@10.2.1.102'| 0.30612244897959184]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <11993.5896.0> 2011-01-03 13:26:58 =============================================================================== vbucketmigrator<0.5896.0>: Bucket 85 moved to the next server vbucketmigrator<0.5896.0>: Validate bucket states vbucketmigrator<0.5896.0>: 85 ok INFO REPORT <11993.5913.0> 2011-01-03 13:26:59 =============================================================================== vbucketmigrator<0.5913.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5913.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5913.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5913.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5913.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5913.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5913.0>: Starting to move bucket 86 INFO REPORT <0.8586.0> 2011-01-03 13:27:00 =============================================================================== vbucketmigrator<0.8586.0>: Bucket 665 moved to the next server vbucketmigrator<0.8586.0>: Validate bucket states vbucketmigrator<0.8586.0>: 665 ok INFO REPORT <0.8601.0> 2011-01-03 13:27:01 =============================================================================== vbucketmigrator<0.8601.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.8601.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.8601.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.8601.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.8601.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.8601.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.8601.0>: Starting to move bucket 666 INFO REPORT <0.8601.0> 2011-01-03 13:27:05 =============================================================================== vbucketmigrator<0.8601.0>: Bucket 666 moved to the next server vbucketmigrator<0.8601.0>: Validate bucket states vbucketmigrator<0.8601.0>: 666 ok INFO REPORT <11993.5913.0> 2011-01-03 13:27:06 =============================================================================== vbucketmigrator<0.5913.0>: Bucket 86 moved to the next server vbucketmigrator<0.5913.0>: Validate bucket states vbucketmigrator<0.5913.0>: 86 ok INFO REPORT <0.8617.0> 2011-01-03 13:27:06 =============================================================================== vbucketmigrator<0.8617.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.8617.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.8617.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.8617.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.8617.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.8617.0>: Authenticated towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.8617.0>: Starting to move bucket 667 INFO REPORT <11993.5930.0> 2011-01-03 13:27:07 =============================================================================== vbucketmigrator<0.5930.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.5930.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.5930.0>: Authenticated towards: {Sock 
10.2.1.102:11210} vbucketmigrator<0.5930.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.5930.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5930.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.5930.0>: Starting to move bucket 87 INFO REPORT <0.93.0> 2011-01-03 13:27:08 =============================================================================== ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,90027,401401}}, {active_buckets,["default"]}, {memory, [{total,20602808}, {processes,12622988}, {processes_used,12606324}, {system,7979820}, {atom,560301}, {atom_used,557531}, {binary,214256}, {code,4570913}, {ets,1212140}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2039}, {memory_data,{4284698624,4184002560,{<0.299.0>,2057352}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,87531520}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2030390,0}}, {context_switches,{642250,0}}, {garbage_collection,{118653,1236699509,0}}, {io,{{input,56322410},{output,29488639}}}, {reductions,{542521188,2181934}}, {run_queue,0}, {runtime,{27268,187}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,90028,383400}}, {active_buckets,["default"]}, {memory, [{total,17187440}, {processes,9106692}, {processes_used,9098012}, {system,8080748}, {atom,559813}, {atom_used,556363}, {binary,347592}, {code,4551541}, {ets,1206820}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1970}, {memory_data,{4284698624,4225822720,{<11993.387.0>,2357452}}}, {disk_data, [{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,37367808}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1960891,0}}, {context_switches,{340244,0}}, {garbage_collection,{84265,737067056,0}}, {io,{{input,45387960},{output,22335172}}}, {reductions,{216273834,2036616}}, {run_queue,0}, {runtime,{16395,140}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,90028,149400}}, {active_buckets,["default"]}, {memory, [{total,13365568}, {processes,6024692}, {processes_used,6011268}, {system,7340876}, {atom,541077}, {atom_used,528744}, {binary,313296}, {code,4280811}, {ets,794308}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,958}, {memory_data,{4284698624,2363174912,{<10870.307.0>,1271680}}}, {disk_data, [{"C:\\",49423972,39},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1817407488}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{950327,0}}, {context_switches,{101197,0}}, {garbage_collection,{26634,128551576,0}}, {io,{{input,12620439},{output,8597191}}}, {reductions,{53087416,587595}}, {run_queue,0}, {runtime,{5600,78}}]}]}] INFO REPORT <0.110.0> 2011-01-03 13:27:08 
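The ns_doctor snapshot above is where the memory picture behind this rebalance shows up: each node's free_memory sits inside its system_memory_data proplist. A minimal sketch for pulling it out of a captured dump follows (Python; the file name and the 100 MiB alert threshold are invented for illustration, not anything ns_server defines):

import re

# Minimal sketch: extract each node's free_memory from a flattened
# ns_doctor "Current node statuses" dump. "ns_doctor_dump.txt" and the
# 100 MiB threshold below are illustrative assumptions.
LOW_WATERMARK = 100 * 1024 * 1024

def free_memory_by_node(dump):
    nodes = {}
    # A node section runs from {'ns_1@<ip>', ... up to the next {'ns_1@
    for m in re.finditer(r"\{'(ns_1@[\d.]+)',(.*?)(?=\{'ns_1@|\Z)", dump, re.S):
        fm = re.search(r"\{free_memory,(\d+)\}", m.group(2))
        if fm:
            nodes[m.group(1)] = int(fm.group(1))
    return nodes

if __name__ == "__main__":
    dump = open("ns_doctor_dump.txt").read()  # hypothetical capture file
    for node, free in sorted(free_memory_by_node(dump).items()):
        mark = "  <-- low" if free < LOW_WATERMARK else ""
        print("%s: %.0f MiB free%s" % (node, free / 1048576.0, mark))

Run against the 13:27:08 snapshot, this would flag ns_1@10.2.1.100 (about 83 MiB free) and ns_1@10.2.1.101 (about 36 MiB free), while ns_1@10.2.1.102 still reports roughly 1.7 GiB.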
INFO REPORT <0.110.0> 2011-01-03 13:27:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 0.6875]], [['ns_1@10.2.1.101'| 0.16161616161616166]], [['ns_1@10.2.1.102'| 0.33333333333333337]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <0.8617.0> 2011-01-03 13:27:10
===============================================================================
vbucketmigrator<0.8617.0>: Bucket 667 moved to the next server
vbucketmigrator<0.8617.0>: Validate bucket states
vbucketmigrator<0.8617.0>: 667 ok
INFO REPORT <0.85.0> 2011-01-03 13:27:10
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'
INFO REPORT <0.8629.0> 2011-01-03 13:27:11
===============================================================================
vbucketmigrator<0.8629.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8629.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8629.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8629.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8629.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8629.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8629.0>: Starting to move bucket 668
INFO REPORT <11993.5930.0> 2011-01-03 13:27:13
===============================================================================
vbucketmigrator<0.5930.0>: Bucket 87 moved to the next server
vbucketmigrator<0.5930.0>: Validate bucket states
vbucketmigrator<0.5930.0>: 87 ok
INFO REPORT <11993.5947.0> 2011-01-03 13:27:14
===============================================================================
vbucketmigrator<0.5947.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5947.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5947.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5947.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5947.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5947.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5947.0>: Starting to move bucket 88
INFO REPORT <0.8629.0> 2011-01-03 13:27:15
===============================================================================
vbucketmigrator<0.8629.0>: Bucket 668 moved to the next server
vbucketmigrator<0.8629.0>: Validate bucket states
vbucketmigrator<0.8629.0>: 668 ok
INFO REPORT <0.8641.0> 2011-01-03 13:27:16
===============================================================================
vbucketmigrator<0.8641.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8641.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8641.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8641.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8641.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8641.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8641.0>: Starting to move bucket 669
INFO REPORT <0.110.0> 2011-01-03 13:27:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 0.7083333333333333]], [['ns_1@10.2.1.101'| 0.18181818181818177]], [['ns_1@10.2.1.102'| 0.3537414965986394]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <0.8641.0> 2011-01-03 13:27:18
===============================================================================
vbucketmigrator<0.8641.0>: Bucket 669 moved to the next server
vbucketmigrator<0.8641.0>: Validate bucket states
vbucketmigrator<0.8641.0>: 669 ok
INFO REPORT <0.8651.0> 2011-01-03 13:27:19
===============================================================================
vbucketmigrator<0.8651.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8651.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8651.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8651.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8651.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8651.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8651.0>: Starting to move bucket 670
INFO REPORT <0.8651.0> 2011-01-03 13:27:20
===============================================================================
vbucketmigrator<0.8651.0>: Bucket 670 moved to the next server
vbucketmigrator<0.8651.0>: Validate bucket states
vbucketmigrator<0.8651.0>: 670 ok
INFO REPORT <0.8653.0> 2011-01-03 13:27:21
===============================================================================
vbucketmigrator<0.8653.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8653.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8653.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8653.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8653.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8653.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8653.0>: Starting to move bucket 671
INFO REPORT <11993.5947.0> 2011-01-03 13:27:21
===============================================================================
vbucketmigrator<0.5947.0>: Bucket 88 moved to the next server
vbucketmigrator<0.5947.0>: Validate bucket states
vbucketmigrator<0.5947.0>: 88 ok
INFO REPORT <11993.5967.0> 2011-01-03 13:27:22
===============================================================================
vbucketmigrator<0.5967.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5967.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5967.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5967.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5967.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5967.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5967.0>: Starting to move bucket 89
INFO REPORT <0.8653.0> 2011-01-03 13:27:23
===============================================================================
vbucketmigrator<0.8653.0>: Bucket 671 moved to the next server
vbucketmigrator<0.8653.0>: Validate bucket states
vbucketmigrator<0.8653.0>: 671 ok
INFO REPORT <0.8659.0> 2011-01-03 13:27:24
===============================================================================
vbucketmigrator<0.8659.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8659.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8659.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8659.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8659.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8659.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8659.0>: Starting to move bucket 672
INFO REPORT <0.8659.0> 2011-01-03 13:27:25
===============================================================================
vbucketmigrator<0.8659.0>: Bucket 672 moved to the next server
vbucketmigrator<0.8659.0>: Validate bucket states
vbucketmigrator<0.8659.0>: 672 ok
INFO REPORT <0.8670.0> 2011-01-03 13:27:26
===============================================================================
vbucketmigrator<0.8670.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8670.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8670.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8670.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8670.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8670.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8670.0>: Starting to move bucket 673
INFO REPORT <0.8670.0> 2011-01-03 13:27:26
===============================================================================
vbucketmigrator<0.8670.0>: Bucket 673 moved to the next server
vbucketmigrator<0.8670.0>: Validate bucket states
vbucketmigrator<0.8670.0>: 673 ok
INFO REPORT <0.8675.0> 2011-01-03 13:27:28
===============================================================================
vbucketmigrator<0.8675.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8675.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8675.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8675.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8675.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8675.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8675.0>: Starting to move bucket 674
INFO REPORT <0.110.0> 2011-01-03 13:27:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 0.8125]], [['ns_1@10.2.1.101'| 0.19191919191919193]], [['ns_1@10.2.1.102'| 0.3945578231292517]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <11993.5967.0> 2011-01-03 13:27:28
===============================================================================
vbucketmigrator<0.5967.0>: Bucket 89 moved to the next server
vbucketmigrator<0.5967.0>: Validate bucket states
vbucketmigrator<0.5967.0>: 89 ok
INFO REPORT <0.8675.0> 2011-01-03 13:27:29
===============================================================================
vbucketmigrator<0.8675.0>: Bucket 674 moved to the next server
vbucketmigrator<0.8675.0>: Validate bucket states
vbucketmigrator<0.8675.0>: 674 ok
INFO REPORT <11993.5985.0> 2011-01-03 13:27:29
===============================================================================
vbucketmigrator<0.5985.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.5985.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5985.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.5985.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.5985.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5985.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.5985.0>: Starting to move bucket 90
INFO REPORT <0.8683.0> 2011-01-03 13:27:30
===============================================================================
vbucketmigrator<0.8683.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8683.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8683.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8683.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8683.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8683.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8683.0>: Starting to move bucket 675
INFO REPORT <0.8683.0> 2011-01-03 13:27:30
===============================================================================
vbucketmigrator<0.8683.0>: Bucket 675 moved to the next server
vbucketmigrator<0.8683.0>: Validate bucket states
vbucketmigrator<0.8683.0>: 675 ok
INFO REPORT <0.259.0> 2011-01-03 13:27:31
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 684 auth_errors 0 bucket_conns 2 bytes_read 5781430075 bytes_written 155644608
cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 3677966
conn_yields 238 connection_structures 235 curr_connections 24 curr_items 2283193 curr_items_tot 4099756
daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0
ep_bg_fetched 0 ep_commit_num 4227 ep_commit_time 0 ep_commit_time_total 1312
ep_data_age 198 ep_data_age_highwat 495 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81
ep_dbname c:/Program Files/Membase/Server/data/ns_1/default
ep_dbshards 4 ep_expired 0 ep_flush_duration 70 ep_flush_duration_highwat 297 ep_flush_duration_total 1751
ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 0
ep_io_num_read 746551 ep_io_num_write 4103390 ep_io_read_bytes 961008240 ep_io_write_bytes 4633759783
ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0
ep_kv_size 2360704692 ep_max_data_size 3426746368 ep_max_txn_size 1000
ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0
ep_num_active_non_resident 659025 ep_num_eject_failures 1689334 ep_num_eject_replicas 871116
ep_num_expiry_pager_runs 0 ep_num_non_resident 1950230 ep_num_not_my_vbuckets 561512
ep_num_pager_runs 6 ep_num_value_ejects 1951599 ep_oom_errors 0 ep_overhead 35295992
ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0
ep_queue_age_cap 900 ep_queue_size 0 ep_storage_age 0 ep_storage_age_highwat 492
ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1
ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 740295 ep_tap_keepalive 0 ep_tmp_oom_errors 0
ep_too_old 0 ep_too_young 0 ep_total_cache_size 5011079772 ep_total_del_items 0
ep_total_enqueued 4103495 ep_total_new_items 4099756 ep_total_persisted 4103390
ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0
ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600
ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0
ep_warmup_thread complete ep_warmup_time 31200
get_hits 0 get_misses 0 incr_hits 0 incr_misses 0
libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2396000684 pid 2160 pointer_size 64
rejected_conns 0 tap_connect_received 681 tap_mutation_received 1576969 tap_mutation_sent 3463722
tap_opaque_received 1468 tap_opaque_sent 2094 tap_vbucket_set_sent 1354
threads 4 time 1294090051 total_connections 1328 uptime 2065 version 1.4.4_304_g7d5a132
INFO REPORT <0.8690.0> 2011-01-03 13:27:31
===============================================================================
vbucketmigrator<0.8690.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8690.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8690.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8690.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8690.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8690.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8690.0>: Starting to move bucket 676
INFO REPORT <0.8690.0> 2011-01-03 13:27:32
===============================================================================
vbucketmigrator<0.8690.0>: Bucket 676 moved to the next server
vbucketmigrator<0.8690.0>: Validate bucket states
vbucketmigrator<0.8690.0>: 676 ok
INFO REPORT <0.8695.0> 2011-01-03 13:27:33
===============================================================================
vbucketmigrator<0.8695.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8695.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8695.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8695.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8695.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8695.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8695.0>: Starting to move bucket 677
INFO REPORT <0.8695.0> 2011-01-03 13:27:34
===============================================================================
vbucketmigrator<0.8695.0>: Bucket 677 moved to the next server
vbucketmigrator<0.8695.0>: Validate bucket states
vbucketmigrator<0.8695.0>: 677 ok
INFO REPORT <0.8700.0> 2011-01-03 13:27:35
===============================================================================
vbucketmigrator<0.8700.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8700.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8700.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8700.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8700.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8700.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8700.0>: Starting to move bucket 678
INFO REPORT <11993.5985.0> 2011-01-03 13:27:36
===============================================================================
vbucketmigrator<0.5985.0>: Bucket 90 moved to the next server
vbucketmigrator<0.5985.0>: Validate bucket states
vbucketmigrator<0.5985.0>: 90 ok
INFO REPORT <0.8700.0> 2011-01-03 13:27:37
===============================================================================
vbucketmigrator<0.8700.0>: Bucket 678 moved to the next server
vbucketmigrator<0.8700.0>: Validate bucket states
vbucketmigrator<0.8700.0>: 678 ok
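The stats_collector report above is a flat run of name/value pairs from the bucket's memcached and ep-engine stats. A small sketch that pulls out the counters most relevant mid-rebalance and derives two ratios (Python; the capture file name is hypothetical):

import re

# Sketch: read a captured "Stats for bucket" dump (flat "name value"
# text as above) and derive the figures that matter during a rebalance.
# "stats_default.txt" is an assumed capture file, not a real artifact.
text = open("stats_default.txt").read()

def stat(name):
    m = re.search(r"\b%s\s+(\d+)\b" % name, text)
    if m is None:
        raise KeyError(name)
    return int(m.group(1))

curr_items_tot = stat("curr_items_tot")       # active + replica items
non_resident   = stat("ep_num_non_resident")  # items ejected to disk
mem_used       = stat("mem_used")
quota          = stat("ep_max_data_size")     # bucket RAM quota in bytes

print("resident ratio : %.1f%%" % (100.0 * (1 - non_resident / float(curr_items_tot))))
print("quota used     : %.1f%%" % (100.0 * mem_used / quota))
print("not-my-vbucket : %d" % stat("ep_num_not_my_vbuckets"))

On the 13:27:31 numbers this gives roughly 52% of items resident (1 - 1950230/4099756) and about 70% of the 3426746368-byte quota in use, alongside 561512 accumulated not-my-vbucket errors.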
INFO REPORT <11993.6001.0> 2011-01-03 13:27:37
===============================================================================
vbucketmigrator<0.6001.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6001.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6001.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6001.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6001.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6001.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6001.0>: Starting to move bucket 91
INFO REPORT <0.8706.0> 2011-01-03 13:27:38
===============================================================================
vbucketmigrator<0.8706.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8706.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8706.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8706.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8706.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8706.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8706.0>: Starting to move bucket 679
INFO REPORT <0.110.0> 2011-01-03 13:27:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 0.9166666666666666]], [['ns_1@10.2.1.101'| 0.21212121212121215]], [['ns_1@10.2.1.102'| 0.44217687074829937]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <0.8706.0> 2011-01-03 13:27:39
===============================================================================
vbucketmigrator<0.8706.0>: Bucket 679 moved to the next server
vbucketmigrator<0.8706.0>: Validate bucket states
vbucketmigrator<0.8706.0>: 679 ok
INFO REPORT <0.8716.0> 2011-01-03 13:27:40
===============================================================================
vbucketmigrator<0.8716.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8716.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8716.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8716.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8716.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8716.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8716.0>: Starting to move bucket 680
INFO REPORT <0.8716.0> 2011-01-03 13:27:41
===============================================================================
vbucketmigrator<0.8716.0>: Bucket 680 moved to the next server
vbucketmigrator<0.8716.0>: Validate bucket states
vbucketmigrator<0.8716.0>: 680 ok
INFO REPORT <0.8718.0> 2011-01-03 13:27:42
===============================================================================
vbucketmigrator<0.8718.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8718.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8718.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8718.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8718.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8718.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8718.0>: Starting to move bucket 681
INFO REPORT <0.8718.0> 2011-01-03 13:27:43
===============================================================================
vbucketmigrator<0.8718.0>: Bucket 681 moved to the next server
vbucketmigrator<0.8718.0>: Validate bucket states
vbucketmigrator<0.8718.0>: 681 ok
INFO REPORT <11993.6001.0> 2011-01-03 13:27:44
===============================================================================
vbucketmigrator<0.6001.0>: Bucket 91 moved to the next server
vbucketmigrator<0.6001.0>: Validate bucket states
vbucketmigrator<0.6001.0>: 91 ok
INFO REPORT <0.8724.0> 2011-01-03 13:27:44
===============================================================================
vbucketmigrator<0.8724.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.8724.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8724.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.8724.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.8724.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8724.0>: Authenticated towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.8724.0>: Starting to move bucket 682
INFO REPORT <0.8724.0> 2011-01-03 13:27:45
===============================================================================
vbucketmigrator<0.8724.0>: Bucket 682 moved to the next server
vbucketmigrator<0.8724.0>: Validate bucket states
vbucketmigrator<0.8724.0>: 682 ok
INFO REPORT <11993.6017.0> 2011-01-03 13:27:45
===============================================================================
vbucketmigrator<0.6017.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6017.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6017.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6017.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6017.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6017.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6017.0>: Starting to move bucket 92
INFO REPORT <0.85.0> 2011-01-03 13:27:47
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'
INFO REPORT <0.110.0> 2011-01-03 13:27:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.2222222222222222]], [['ns_1@10.2.1.102'| 0.47619047619047616]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <11993.6017.0> 2011-01-03 13:27:55
===============================================================================
vbucketmigrator<0.6017.0>: Bucket 92 moved to the next server
vbucketmigrator<0.6017.0>: Validate bucket states
vbucketmigrator<0.6017.0>: 92 ok
INFO REPORT <11993.6043.0> 2011-01-03 13:27:57
===============================================================================
vbucketmigrator<0.6043.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6043.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6043.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6043.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6043.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6043.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6043.0>: Starting to move bucket 93
INFO REPORT <0.110.0> 2011-01-03 13:27:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.2222222222222222]], [['ns_1@10.2.1.102'| 0.47619047619047616]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
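Each "Skipping janitor in state rebalancing" report dumps the orchestrator's progress dict, which pairs every node with a completion fraction; in the 13:27:48 entry above, ns_1@10.2.1.100 has reached 1.0. A sketch of one way to read those fractions back out of a captured line (Python; averaging them is an assumption about how to summarize the dict, not necessarily what the management console displays):

import re

# Sketch: recover per-node progress from one "Skipping janitor in state
# rebalancing" line, as captured above. The mean is just one plausible
# summary figure.
line = ("{{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.2222222222222222]], "
        "[['ns_1@10.2.1.102'| 0.47619047619047616]], ...}}")

progress = {node: float(frac)
            for node, frac in re.findall(r"\['(ns_1@[\d.]+)'\|\s*([\d.]+)\]", line)}
for node, frac in sorted(progress.items()):
    print("%s: %.1f%%" % (node, 100 * frac))
print("mean: %.1f%%" % (100 * sum(progress.values()) / len(progress)))

For the 13:27:48 entry this prints 100.0%, 22.2% and 47.6%, a mean of about 56.6%.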
INFO REPORT <0.85.0> 2011-01-03 13:28:02
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <11993.6043.0> 2011-01-03 13:28:04
===============================================================================
vbucketmigrator<0.6043.0>: Bucket 93 moved to the next server
vbucketmigrator<0.6043.0>: Validate bucket states
vbucketmigrator<0.6043.0>: 93 ok
INFO REPORT <11993.6064.0> 2011-01-03 13:28:05
===============================================================================
vbucketmigrator<0.6064.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6064.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6064.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6064.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6064.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6064.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6064.0>: Starting to move bucket 94
INFO REPORT <0.93.0> 2011-01-03 13:28:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,90087,399401}},
   {active_buckets,["default"]},
   {memory,[{total,20127624}, {processes,12134372}, {processes_used,12116844}, {system,7993252}, {atom,560301}, {atom_used,557531}, {binary,228912}, {code,4570913}, {ets,1210580}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,2099},
   {memory_data,{4284698624,4191027200,{<0.299.0>,3328596}}},
   {disk_data,[{"C:\\",48162864,59}, {"D:\\",51279476,0}, {"G:\\",34724465,17}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624}, {free_memory,77316096}, {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{2090404,0}}, {context_switches,{655787,0}}, {garbage_collection,{121206,1266244910,0}}, {io,{{input,57308534},{output,30009787}}}, {reductions,{550044222,1037185}}, {run_queue,0}, {runtime,{27861,31}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,90088,381402}},
   {active_buckets,["default"]},
   {memory,[{total,18931496}, {processes,10853292}, {processes_used,10844612}, {system,8078204}, {atom,559813}, {atom_used,556363}, {binary,346368}, {code,4551541}, {ets,1205332}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,2030},
   {memory_data,{4284698624,4250001408,{<11993.387.0>,2843024}}},
   {disk_data,[{"C:\\",46243100,45}, {"D:\\",51809624,0}, {"G:\\",33929248,18}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624}, {free_memory,80846848}, {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{2021059,15}}, {context_switches,{351270,0}}, {garbage_collection,{86701,761963060,0}}, {io,{{input,46476180},{output,22751875}}}, {reductions,{224218543,2285217}}, {run_queue,0}, {runtime,{16910,203}}]}]},
 {'ns_1@10.2.1.102',
  [{last_heard,{1294,90088,147400}},
   {active_buckets,["default"]},
   {memory,[{total,13696136}, {processes,6351036}, {processes_used,6337612}, {system,7345100}, {atom,541077}, {atom_used,528744}, {binary,319096}, {code,4280811}, {ets,792452}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1018},
   {memory_data,{4284698624,2492628992,{<10870.218.0>,971680}}},
   {disk_data,[{"C:\\",49423972,39}, {"D:\\",52797620,0}, {"G:\\",34724465,17}]},
   {replication,[{"default",1.0}]},
   {system_memory_data,[{total_memory,4284698624}, {free_memory,1638178816}, {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{1010325,0}}, {context_switches,{106754,0}}, {garbage_collection,{28255,136866724,0}}, {io,{{input,12901230},{output,8869650}}}, {reductions,{56604481,578739}}, {run_queue,0}, {runtime,{5959,78}}]}]}]
INFO REPORT <0.110.0> 2011-01-03 13:28:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.23232323232323238]], [['ns_1@10.2.1.102'| 0.48299319727891155]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <11993.6064.0> 2011-01-03 13:28:13
===============================================================================
vbucketmigrator<0.6064.0>: Bucket 94 moved to the next server
vbucketmigrator<0.6064.0>: Validate bucket states
vbucketmigrator<0.6064.0>: 94 ok
INFO REPORT <11993.6101.0> 2011-01-03 13:28:14
===============================================================================
vbucketmigrator<0.6101.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6101.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6101.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6101.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6101.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6101.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6101.0>: Starting to move bucket 95
INFO REPORT <0.110.0> 2011-01-03 13:28:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.24242424242424243]], [['ns_1@10.2.1.102'| 0.4897959183673469]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <11993.6101.0> 2011-01-03 13:28:21
===============================================================================
vbucketmigrator<0.6101.0>: Bucket 95 moved to the next server
vbucketmigrator<0.6101.0>: Validate bucket states
vbucketmigrator<0.6101.0>: 95 ok
INFO REPORT <11993.6118.0> 2011-01-03 13:28:22
===============================================================================
vbucketmigrator<0.6118.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6118.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6118.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6118.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6118.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6118.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6118.0>: Starting to move bucket 96
INFO REPORT <0.110.0> 2011-01-03 13:28:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.2525252525252525]], [['ns_1@10.2.1.102'| 0.4965986394557823]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <11993.6118.0> 2011-01-03 13:28:30
===============================================================================
vbucketmigrator<0.6118.0>: Bucket 96 moved to the next server
vbucketmigrator<0.6118.0>: Validate bucket states
vbucketmigrator<0.6118.0>: 96 ok
INFO REPORT <11993.6145.0> 2011-01-03 13:28:31
===============================================================================
vbucketmigrator<0.6145.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6145.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6145.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6145.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6145.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6145.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6145.0>: Starting to move bucket 97
INFO REPORT <0.110.0> 2011-01-03 13:28:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.26262626262626265]], [['ns_1@10.2.1.102'| 0.5034013605442177]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <11993.6145.0> 2011-01-03 13:28:39
===============================================================================
vbucketmigrator<0.6145.0>: Bucket 97 moved to the next server
vbucketmigrator<0.6145.0>: Validate bucket states
vbucketmigrator<0.6145.0>: 97 ok
INFO REPORT <11993.6165.0> 2011-01-03 13:28:40
===============================================================================
vbucketmigrator<0.6165.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6165.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6165.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6165.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6165.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6165.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6165.0>: Starting to move bucket 98
INFO REPORT <0.85.0> 2011-01-03 13:28:45
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'
INFO REPORT <11993.6165.0> 2011-01-03 13:28:46
===============================================================================
vbucketmigrator<0.6165.0>: Bucket 98 moved to the next server
vbucketmigrator<0.6165.0>: Validate bucket states
vbucketmigrator<0.6165.0>: 98 ok
INFO REPORT <11993.6181.0> 2011-01-03 13:28:47
===============================================================================
vbucketmigrator<0.6181.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6181.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6181.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6181.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6181.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6181.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6181.0>: Starting to move bucket 99
INFO REPORT <0.110.0> 2011-01-03 13:28:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.2828282828282829]], [['ns_1@10.2.1.102'| 0.5170068027210885]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <11993.6181.0> 2011-01-03 13:28:55
===============================================================================
vbucketmigrator<0.6181.0>: Bucket 99 moved to the next server
vbucketmigrator<0.6181.0>: Validate bucket states
vbucketmigrator<0.6181.0>: 99 ok
INFO REPORT <11993.6201.0> 2011-01-03 13:28:56
===============================================================================
vbucketmigrator<0.6201.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6201.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6201.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6201.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6201.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6201.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6201.0>: Starting to move bucket 100
INFO REPORT <0.110.0> 2011-01-03 13:28:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.29292929292929293]], [['ns_1@10.2.1.102'| 0.5238095238095238]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <11993.6201.0> 2011-01-03 13:29:05
===============================================================================
vbucketmigrator<0.6201.0>: Bucket 100 moved to the next server
vbucketmigrator<0.6201.0>: Validate bucket states
vbucketmigrator<0.6201.0>: 100 ok
INFO REPORT <11993.6218.0> 2011-01-03 13:29:06
===============================================================================
vbucketmigrator<0.6218.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6218.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6218.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6218.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6218.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6218.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6218.0>: Starting to move bucket 101
INFO REPORT <0.93.0> 2011-01-03 13:29:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,90147,412401}},
   {active_buckets,["default"]},
   {memory,[{total,27304600}, {processes,19316292}, {processes_used,19297900}, {system,7988308}, {atom,560301}, {atom_used,557531}, {binary,190744}, {code,4570913}, {ets,1244644}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,2159},
   {memory_data,{4284698624,4204601344,{<0.299.0>,3328596}}},
   {disk_data,[{"C:\\",48162864,59}, {"D:\\",51279476,0}, {"G:\\",34724465,17}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624}, {free_memory,80855040}, {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{2150402,0}}, {context_switches,{670426,0}}, {garbage_collection,{123679,1299146804,0}}, {io,{{input,58575487},{output,30544972}}}, {reductions,{560101328,1020616}}, {run_queue,0}, {runtime,{28407,31}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,90148,395401}},
   {active_buckets,["default"]},
   {memory,[{total,19785528}, {processes,11671988}, {processes_used,11663308}, {system,8113540}, {atom,559813}, {atom_used,556363}, {binary,341368}, {code,4551541}, {ets,1239348}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,2090},
   {memory_data,{4284698624,4209770496,{<11993.387.0>,4114268}}},
   {disk_data,[{"C:\\",46243100,45}, {"D:\\",51809624,0}, {"G:\\",33929248,18}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624}, {free_memory,50098176}, {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{2080902,0}}, {context_switches,{362730,0}}, {garbage_collection,{89084,785424615,0}}, {io,{{input,48373785},{output,24298056}}}, {reductions,{231933397,2203406}}, {run_queue,0}, {runtime,{17331,140}}]}]},
 {'ns_1@10.2.1.102',
  [{last_heard,{1294,90148,161400}},
   {active_buckets,["default"]},
   {memory,[{total,13345496}, {processes,5965916}, {processes_used,5952492}, {system,7379580}, {atom,541077}, {atom_used,528744}, {binary,319520}, {code,4280811}, {ets,826492}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1078},
   {memory_data,{4284698624,2655416320,{<10870.307.0>,1271680}}},
   {disk_data,[{"C:\\",49423972,40}, {"D:\\",52797620,0}, {"G:\\",34724465,17}]},
   {replication,[{"default",1.0}]},
   {system_memory_data,[{total_memory,4284698624}, {free_memory,1585430528}, {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{1070323,0}}, {context_switches,{112354,0}}, {garbage_collection,{29943,145424901,0}}, {io,{{input,13166603},{output,9136295}}}, {reductions,{60144188,576624}}, {run_queue,0}, {runtime,{6630,16}}]}]}]
INFO REPORT <0.110.0> 2011-01-03 13:29:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.303030303030303]], [['ns_1@10.2.1.102'| 0.5306122448979591]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <0.85.0> 2011-01-03 13:29:11
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'
INFO REPORT <0.259.0> 2011-01-03 13:29:11
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 690 auth_errors 0 bucket_conns 1 bytes_read 5781434521 bytes_written 156667294
cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 3677966
conn_yields 238 connection_structures 235 curr_connections 23 curr_items 2237258 curr_items_tot 4099756
daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0
ep_bg_fetched 0 ep_commit_num 4227 ep_commit_time 0 ep_commit_time_total 1312
ep_data_age 198 ep_data_age_highwat 495 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81
ep_dbname c:/Program Files/Membase/Server/data/ns_1/default
ep_dbshards 4 ep_expired 0 ep_flush_duration 70 ep_flush_duration_highwat 297 ep_flush_duration_total 1751
ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 0
ep_io_num_read 790849 ep_io_num_write 4103390 ep_io_read_bytes 1004360426 ep_io_write_bytes 4633759783
ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0
ep_kv_size 2360704692 ep_max_data_size 3426746368 ep_max_txn_size 1000
ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0
ep_num_active_non_resident 645644 ep_num_eject_failures 1689334 ep_num_eject_replicas 871116
ep_num_expiry_pager_runs 0 ep_num_non_resident 1950230 ep_num_not_my_vbuckets 561512
ep_num_pager_runs 6 ep_num_value_ejects 1951599 ep_oom_errors 0 ep_overhead 35295992
ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0
ep_queue_age_cap 900 ep_queue_size 0 ep_storage_age 0 ep_storage_age_highwat 492
ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1
ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 784593 ep_tap_keepalive 0 ep_tmp_oom_errors 0
ep_too_old 0 ep_too_young 0 ep_total_cache_size 5011079772 ep_total_del_items 0
ep_total_enqueued 4103495 ep_total_new_items 4099756 ep_total_persisted 4103390
ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0
ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600
ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0
ep_warmup_thread complete ep_warmup_time 31200
get_hits 0 get_misses 0 incr_hits 0 incr_misses 0
libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2396000684 pid 2160 pointer_size 64
rejected_conns 0 tap_connect_received 687 tap_mutation_received 1576969 tap_mutation_sent 3535909
tap_opaque_received 1468 tap_opaque_sent 2100 tap_vbucket_set_sent 1367
threads 4 time 1294090151 total_connections 1334 uptime 2165 version 1.4.4_304_g7d5a132
INFO REPORT <11993.6218.0> 2011-01-03 13:29:17
===============================================================================
vbucketmigrator<0.6218.0>: Bucket 101 moved to the next server
vbucketmigrator<0.6218.0>: Validate bucket states
vbucketmigrator<0.6218.0>: 101 ok
INFO REPORT <11993.6244.0> 2011-01-03 13:29:18
===============================================================================
vbucketmigrator<0.6244.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6244.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6244.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6244.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6244.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6244.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6244.0>: Starting to move bucket 102
INFO REPORT <0.110.0> 2011-01-03 13:29:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.31313131313131315]], [['ns_1@10.2.1.102'| 0.5374149659863945]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <11993.6244.0> 2011-01-03 13:29:27
===============================================================================
vbucketmigrator<0.6244.0>: Bucket 102 moved to the next server
vbucketmigrator<0.6244.0>: Validate bucket states
vbucketmigrator<0.6244.0>: 102 ok
INFO REPORT <0.110.0> 2011-01-03 13:29:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.3232323232323232]], [['ns_1@10.2.1.102'| 0.5442176870748299]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <11993.6267.0> 2011-01-03 13:29:28
===============================================================================
vbucketmigrator<0.6267.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6267.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6267.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6267.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6267.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6267.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6267.0>: Starting to move bucket 103
INFO REPORT <11993.6267.0> 2011-01-03 13:29:35
===============================================================================
vbucketmigrator<0.6267.0>: Bucket 103 moved to the next server
vbucketmigrator<0.6267.0>: Validate bucket states
vbucketmigrator<0.6267.0>: 103 ok
INFO REPORT <11993.6288.0> 2011-01-03 13:29:36
===============================================================================
vbucketmigrator<0.6288.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6288.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6288.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6288.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6288.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6288.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6288.0>: Starting to move bucket 104
INFO REPORT <0.110.0> 2011-01-03 13:29:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state, <0.8123.0>,
 {dict, 3, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.33333333333333337]], [['ns_1@10.2.1.102'| 0.5510204081632653]], [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <0.85.0> 2011-01-03 13:29:42
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <11993.6288.0> 2011-01-03 13:29:43
===============================================================================
vbucketmigrator<0.6288.0>: Bucket 104 moved to the next server
vbucketmigrator<0.6288.0>: Validate bucket states
vbucketmigrator<0.6288.0>: 104 ok
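With the records reflowed as above, each vBucket transfer can be timed by pairing "Starting to move bucket N" with the matching "Bucket N moved to the next server", taking the timestamp from the enclosing INFO REPORT header. A sketch under those layout assumptions (Python 3.8+):

import re
from datetime import datetime

# Sketch: derive per-vBucket move durations from a reflowed log.
# Assumes one "INFO REPORT <pid> date time" header line per record,
# followed by a separator and then the message lines, as above.
HEADER = re.compile(r"^INFO REPORT <[\d.]+> (\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})")
START = re.compile(r"Starting to move bucket (\d+)")
DONE = re.compile(r"Bucket (\d+) moved to the next server")

def move_durations(lines):
    now, started, done = None, {}, {}
    for line in lines:
        if m := HEADER.match(line):
            now = datetime.strptime(m.group(1), "%Y-%m-%d %H:%M:%S")
        elif m := START.search(line):
            started[m.group(1)] = now
        elif (m := DONE.search(line)) and m.group(1) in started:
            done[m.group(1)] = (now - started.pop(m.group(1))).seconds
    return done  # vBucket id -> seconds

Bucket 100, for instance, starts at 13:28:56 and completes at 13:29:05, about nine seconds; in this stretch the replica moves toward ns_1@10.2.1.101 run visibly slower than the late-600s moves toward ns_1@10.2.1.100, many of which finish within a second or two.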
INFO REPORT <11993.6305.0> 2011-01-03 13:29:44
===============================================================================
vbucketmigrator<0.6305.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6305.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6305.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6305.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6305.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6305.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6305.0>: Starting to move bucket 105

INFO REPORT <0.110.0> 2011-01-03 13:29:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.3434343434343434]], [['ns_1@10.2.1.102'| 0.5578231292517006]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6305.0> 2011-01-03 13:29:51
===============================================================================
vbucketmigrator<0.6305.0>: Bucket 105 moved to the next server
vbucketmigrator<0.6305.0>: Validate bucket states
vbucketmigrator<0.6305.0>: 105 ok

INFO REPORT <11993.6323.0> 2011-01-03 13:29:52
===============================================================================
vbucketmigrator<0.6323.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6323.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6323.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6323.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6323.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6323.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6323.0>: Starting to move bucket 106

INFO REPORT <0.110.0> 2011-01-03 13:29:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.3535353535353535]], [['ns_1@10.2.1.102'| 0.564625850340136]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6323.0> 2011-01-03 13:30:00
===============================================================================
vbucketmigrator<0.6323.0>: Bucket 106 moved to the next server
vbucketmigrator<0.6323.0>: Validate bucket states
vbucketmigrator<0.6323.0>: 106 ok

INFO REPORT <11993.6345.0> 2011-01-03 13:30:01
===============================================================================
vbucketmigrator<0.6345.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6345.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6345.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6345.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6345.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6345.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6345.0>: Starting to move bucket 107

INFO REPORT <0.93.0> 2011-01-03 13:30:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,90207,410401}}, {active_buckets,["default"]}, {memory, [{total,19601336}, {processes,11586164}, {processes_used,11567268}, {system,8015172}, {atom,560301}, {atom_used,557531}, {binary,182496}, {code,4570913}, {ets,1280404}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2219}, {memory_data,{4284698624,4203855872,{<0.8553.0>,8003076}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,81358848}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2210400,0}}, {context_switches,{683981,0}}, {garbage_collection,{125881,1329854548,0}}, {io,{{input,60474749},{output,32259030}}}, {reductions,{568787897,952664}}, {run_queue,0}, {runtime,{28844,62}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,90208,393401}}, {active_buckets,["default"]}, {memory, [{total,20598736}, {processes,12491132}, {processes_used,12482452}, {system,8107604}, {atom,559813}, {atom_used,556363}, {binary,341368}, {code,4551541}, {ets,1239348}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2150}, {memory_data,{4284698624,4250460160,{<11993.387.0>,5385512}}}, {disk_data, [{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,69038080}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2140900,0}}, {context_switches,{373378,0}}, {garbage_collection,{91218,807649947,0}}, {io,{{input,49540035},{output,24298056}}}, {reductions,{239739378,2213357}}, {run_queue,0}, {runtime,{17768,187}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,90208,159400}}, {active_buckets,["default"]}, {memory, [{total,14669272}, {processes,7271884}, {processes_used,7258460}, {system,7397388}, {atom,541077}, {atom_used,528744}, {binary,301480}, {code,4280811}, {ets,862564}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1138}, {memory_data,{4284698624,2707099648,{<10870.307.0>,900736}}}, {disk_data, [{"C:\\",49423972,40},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1538469888}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1130321,0}}, {context_switches,{118104,0}}, {garbage_collection,{31620,154084723,0}}, {io,{{input,14162522},{output,10047702}}}, {reductions,{63766011,578409}}, {run_queue,0}, {runtime,{6926,47}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:30:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.36363636363636365]], [['ns_1@10.2.1.102'| 0.5714285714285714]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}
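Reading successive "Skipping janitor" reports, each completed vbucket move advances one node's fraction by a fixed step, which suggests denominators of 99 moves for ns_1@10.2.1.101 and 147 for ns_1@10.2.1.102 in this rebalance; this is an inference from the deltas, not something the log states:

    %% Erlang shell arithmetic on the floats printed above:
    0.36363636363636365 - 0.3535353535353535.  %% ~0.0101..., i.e. 1/99
    0.5714285714285714 - 0.564625850340136.    %% ~0.0068..., i.e. 1/147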
INFO REPORT <11993.6345.0> 2011-01-03 13:30:10
===============================================================================
vbucketmigrator<0.6345.0>: Bucket 107 moved to the next server
vbucketmigrator<0.6345.0>: Validate bucket states
vbucketmigrator<0.6345.0>: 107 ok

INFO REPORT <11993.6367.0> 2011-01-03 13:30:11
===============================================================================
vbucketmigrator<0.6367.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6367.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6367.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6367.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6367.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6367.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6367.0>: Starting to move bucket 108

INFO REPORT <0.110.0> 2011-01-03 13:30:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.3737373737373737]], [['ns_1@10.2.1.102'| 0.5782312925170068]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6367.0> 2011-01-03 13:30:20
===============================================================================
vbucketmigrator<0.6367.0>: Bucket 108 moved to the next server
vbucketmigrator<0.6367.0>: Validate bucket states
vbucketmigrator<0.6367.0>: 108 ok

INFO REPORT <11993.6391.0> 2011-01-03 13:30:21
===============================================================================
vbucketmigrator<0.6391.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6391.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6391.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6391.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6391.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6391.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6391.0>: Starting to move bucket 109

INFO REPORT <0.110.0> 2011-01-03 13:30:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.38383838383838387]], [['ns_1@10.2.1.102'| 0.5850340136054422]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6391.0> 2011-01-03 13:30:29
===============================================================================
vbucketmigrator<0.6391.0>: Bucket 109 moved to the next server
vbucketmigrator<0.6391.0>: Validate bucket states
vbucketmigrator<0.6391.0>: 109 ok

INFO REPORT <11993.6411.0> 2011-01-03 13:30:30
===============================================================================
vbucketmigrator<0.6411.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6411.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6411.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6411.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6411.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6411.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6411.0>: Starting to move bucket 110
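Every move above follows the same pattern: connect and authenticate to both endpoints, stream the vbucket ("Starting to move bucket N"), then validate the final states ("N ok"). A small, hypothetical Erlang helper for pulling the vbucket id out of these migrator lines (the regex is written against the line shapes in this log; vbucketmigrator itself is a separate native tool):

    -module(vbmig_log).
    -export([vbucket/1]).

    %% Returns {vbucket, N} for lines like "Starting to move bucket 110"
    %% or "Bucket 110 moved to the next server"; 'other' otherwise.
    vbucket(Line) ->
        case re:run(Line, "[Bb]ucket (\\d+)", [{capture, [1], list}]) of
            {match, [N]} -> {vbucket, list_to_integer(N)};
            nomatch      -> other
        end.

Note that the tool says "bucket N" but is numbering vbuckets: these are the 1024 vbuckets of the "default" bucket, not separate Membase buckets.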
INFO REPORT <0.85.0> 2011-01-03 13:30:36
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <11993.6411.0> 2011-01-03 13:30:38
===============================================================================
vbucketmigrator<0.6411.0>: Bucket 110 moved to the next server
vbucketmigrator<0.6411.0>: Validate bucket states
vbucketmigrator<0.6411.0>: 110 ok

INFO REPORT <0.110.0> 2011-01-03 13:30:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.4040404040404041]], [['ns_1@10.2.1.102'| 0.5986394557823129]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6426.0> 2011-01-03 13:30:39
===============================================================================
vbucketmigrator<0.6426.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6426.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6426.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6426.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6426.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6426.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6426.0>: Starting to move bucket 111

INFO REPORT <11993.6426.0> 2011-01-03 13:30:46
===============================================================================
vbucketmigrator<0.6426.0>: Bucket 111 moved to the next server
vbucketmigrator<0.6426.0>: Validate bucket states
vbucketmigrator<0.6426.0>: 111 ok

INFO REPORT <11993.6446.0> 2011-01-03 13:30:47
===============================================================================
vbucketmigrator<0.6446.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6446.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6446.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6446.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6446.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6446.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6446.0>: Starting to move bucket 112

INFO REPORT <0.110.0> 2011-01-03 13:30:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.41414141414141414]], [['ns_1@10.2.1.102'| 0.6054421768707483]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.259.0> 2011-01-03 13:30:51
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default": auth_cmds 690 auth_errors 0 bucket_conns 1 bytes_read 5781438091 bytes_written 157689234 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 3677966 conn_yields 238 connection_structures 235 curr_connections 23 curr_items 2237258 curr_items_tot 4099756 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 4227 ep_commit_time 0 ep_commit_time_total 1312 ep_data_age 198 ep_data_age_highwat 495 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 70 ep_flush_duration_highwat 297 ep_flush_duration_total 1751 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 0 ep_io_num_read 790849 ep_io_num_write 4103390 ep_io_read_bytes 1004360426 ep_io_write_bytes 4633759783 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 2360704692 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 645644 ep_num_eject_failures 1689334 ep_num_eject_replicas 871116 ep_num_expiry_pager_runs 0 ep_num_non_resident 1950230 ep_num_not_my_vbuckets 561512 ep_num_pager_runs 6 ep_num_value_ejects 1951599 ep_oom_errors 0 ep_overhead 35295992 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 0 ep_storage_age 0 ep_storage_age_highwat 492 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 784593 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 5011079772 ep_total_del_items 0 ep_total_enqueued 4103495 ep_total_new_items 4099756 ep_total_persisted 4103390 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2396000684 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 687 tap_mutation_received 1576969 tap_mutation_sent 3535909 tap_opaque_received 1468 tap_opaque_sent 2100 tap_vbucket_set_sent 1367 threads 4 time 1294090250 total_connections 1334 uptime 2264 version 1.4.4_304_g7d5a132
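Two numbers in this stats dump frame the memory pressure on the node: mem_used is already around 93% of ep_mem_high_wat, and barely half the items are still resident. Shell arithmetic on the values printed above:

    %% Values copied from the 13:30:51 stats dump:
    1 - 1950230 / 4099756.    %% ~0.524: only ~52% of items are resident
    2396000684 / 2570059776.  %% ~0.932: mem_used vs ep_mem_high_wat

which is consistent with the large ep_num_value_ejects and ep_num_eject_failures counters in the same dump.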
INFO REPORT <0.85.0> 2011-01-03 13:30:54
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <11993.6446.0> 2011-01-03 13:30:55
===============================================================================
vbucketmigrator<0.6446.0>: Bucket 112 moved to the next server
vbucketmigrator<0.6446.0>: Validate bucket states
vbucketmigrator<0.6446.0>: 112 ok

INFO REPORT <11993.6469.0> 2011-01-03 13:30:56
===============================================================================
vbucketmigrator<0.6469.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6469.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6469.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6469.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6469.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6469.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6469.0>: Starting to move bucket 113

INFO REPORT <0.110.0> 2011-01-03 13:30:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.4242424242424242]], [['ns_1@10.2.1.102'| 0.6122448979591837]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.85.0> 2011-01-03 13:31:03
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <11993.6469.0> 2011-01-03 13:31:04
===============================================================================
vbucketmigrator<0.6469.0>: Bucket 113 moved to the next server
vbucketmigrator<0.6469.0>: Validate bucket states
vbucketmigrator<0.6469.0>: 113 ok

INFO REPORT <11993.6489.0> 2011-01-03 13:31:06
===============================================================================
vbucketmigrator<0.6489.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6489.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6489.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6489.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6489.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6489.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6489.0>: Starting to move bucket 114

INFO REPORT <0.93.0> 2011-01-03 13:31:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,90267,408401}}, {active_buckets,["default"]}, {memory, [{total,27971248}, {processes,19897692}, {processes_used,19879380}, {system,8073556}, {atom,560301}, {atom_used,557531}, {binary,203448}, {code,4570913}, {ets,1317460}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2279}, {memory_data,{4284698624,4203581440,{<0.299.0>,3328596}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,81477632}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2270398,0}}, {context_switches,{697605,0}}, {garbage_collection,{128066,1360673633,0}}, {io,{{input,61665262},{output,32765977}}}, {reductions,{577705964,966310}}, {run_queue,0}, {runtime,{29390,78}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,90268,391401}}, {active_buckets,["default"]}, {memory, [{total,20691960}, {processes,12545204}, {processes_used,12536524}, {system,8146756}, {atom,559813}, {atom_used,556363}, {binary,345472}, {code,4551541}, {ets,1273308}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2210}, {memory_data,{4284698624,4228497408,{<11993.387.0>,5385512}}}, {disk_data, [{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,35762176}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2200898,0}}, {context_switches,{384710,0}}, {garbage_collection,{93572,832666715,0}}, {io,{{input,50722226},{output,24727889}}}, {reductions,{247892450,2326651}}, {run_queue,0}, {runtime,{18283,156}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,90268,157402}}, {active_buckets,["default"]}, {memory, [{total,14365408}, {processes,6917660}, {processes_used,6904236}, {system,7447748}, {atom,541077}, {atom_used,528744}, {binary,317840}, {code,4280811}, {ets,896532}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1198}, {memory_data,{4284698624,2756075520,{<10870.307.0>,2057352}}}, {disk_data, [{"C:\\",49423972,40},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1488396288}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1190319,0}}, {context_switches,{123567,0}}, {garbage_collection,{33204,162239818,0}}, {io,{{input,14436477},{output,10313842}}}, {reductions,{67259904,583456}}, {run_queue,0}, {runtime,{7160,16}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:31:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.43434343434343436]], [['ns_1@10.2.1.102'| 0.6190476190476191]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.85.0> 2011-01-03 13:31:11
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <11993.6489.0> 2011-01-03 13:31:15
===============================================================================
vbucketmigrator<0.6489.0>: Bucket 114 moved to the next server
vbucketmigrator<0.6489.0>: Validate bucket states
vbucketmigrator<0.6489.0>: 114 ok

INFO REPORT <11993.6528.0> 2011-01-03 13:31:16
===============================================================================
vbucketmigrator<0.6528.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6528.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6528.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6528.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6528.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6528.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6528.0>: Starting to move bucket 115

INFO REPORT <0.110.0> 2011-01-03 13:31:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.4444444444444444]], [['ns_1@10.2.1.102'| 0.6258503401360545]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6528.0> 2011-01-03 13:31:25
===============================================================================
vbucketmigrator<0.6528.0>: Bucket 115 moved to the next server
vbucketmigrator<0.6528.0>: Validate bucket states
vbucketmigrator<0.6528.0>: 115 ok
INFO REPORT <11993.6555.0> 2011-01-03 13:31:26
===============================================================================
vbucketmigrator<0.6555.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6555.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6555.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6555.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6555.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6555.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6555.0>: Starting to move bucket 116

INFO REPORT <0.110.0> 2011-01-03 13:31:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.4545454545454546]], [['ns_1@10.2.1.102'| 0.6326530612244898]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6555.0> 2011-01-03 13:31:36
===============================================================================
vbucketmigrator<0.6555.0>: Bucket 116 moved to the next server
vbucketmigrator<0.6555.0>: Validate bucket states
vbucketmigrator<0.6555.0>: 116 ok

INFO REPORT <11993.6577.0> 2011-01-03 13:31:37
===============================================================================
vbucketmigrator<0.6577.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6577.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6577.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6577.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6577.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6577.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6577.0>: Starting to move bucket 117

INFO REPORT <0.110.0> 2011-01-03 13:31:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.46464646464646464]], [['ns_1@10.2.1.102'| 0.6394557823129252]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.85.0> 2011-01-03 13:31:39
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <11993.6577.0> 2011-01-03 13:31:44
===============================================================================
vbucketmigrator<0.6577.0>: Bucket 117 moved to the next server
vbucketmigrator<0.6577.0>: Validate bucket states
vbucketmigrator<0.6577.0>: 117 ok

INFO REPORT <11993.6597.0> 2011-01-03 13:31:45
===============================================================================
vbucketmigrator<0.6597.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6597.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6597.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6597.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6597.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6597.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6597.0>: Starting to move bucket 118

INFO REPORT <0.110.0> 2011-01-03 13:31:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.4747474747474747]], [['ns_1@10.2.1.102'| 0.6462585034013606]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6597.0> 2011-01-03 13:31:51
===============================================================================
vbucketmigrator<0.6597.0>: Bucket 118 moved to the next server
vbucketmigrator<0.6597.0>: Validate bucket states
vbucketmigrator<0.6597.0>: 118 ok

INFO REPORT <11993.6612.0> 2011-01-03 13:31:52
===============================================================================
vbucketmigrator<0.6612.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6612.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6612.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6612.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6612.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6612.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6612.0>: Starting to move bucket 119

INFO REPORT <0.85.0> 2011-01-03 13:31:56
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:31:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.48484848484848486]], [['ns_1@10.2.1.102'| 0.653061224489796]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6612.0> 2011-01-03 13:32:01
===============================================================================
vbucketmigrator<0.6612.0>: Bucket 119 moved to the next server
vbucketmigrator<0.6612.0>: Validate bucket states
vbucketmigrator<0.6612.0>: 119 ok

INFO REPORT <11993.6638.0> 2011-01-03 13:32:02
===============================================================================
vbucketmigrator<0.6638.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6638.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6638.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6638.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6638.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6638.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6638.0>: Starting to move bucket 120
INFO REPORT <0.93.0> 2011-01-03 13:32:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,90327,406401}}, {active_buckets,["default"]}, {memory, [{total,20620432}, {processes,12563292}, {processes_used,12545172}, {system,8057140}, {atom,560301}, {atom_used,557531}, {binary,187936}, {code,4570913}, {ets,1316292}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2339}, {memory_data,{4284698624,4205273088,{<0.9057.0>,8003076}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,88092672}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2330396,0}}, {context_switches,{712682,0}}, {garbage_collection,{130473,1396178987,0}}, {io,{{input,63292786},{output,33347151}}}, {reductions,{587804893,1085255}}, {run_queue,0}, {runtime,{30092,78}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,90328,389401}}, {active_buckets,["default"]}, {memory, [{total,19207736}, {processes,11023556}, {processes_used,11014876}, {system,8184180}, {atom,559813}, {atom_used,556363}, {binary,344336}, {code,4551541}, {ets,1311908}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2270}, {memory_data,{4284698624,4250288128,{<11993.387.0>,5385512}}}, {disk_data, [{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,92700672}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2260912,16}}, {context_switches,{396600,0}}, {garbage_collection,{95842,858282881,0}}, {io,{{input,52689058},{output,26414665}}}, {reductions,{256337537,2428667}}, {run_queue,0}, {runtime,{18813,156}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,90328,155400}}, {active_buckets,["default"]}, {memory, [{total,14898800}, {processes,7452092}, {processes_used,7438668}, {system,7446708}, {atom,541077}, {atom_used,528744}, {binary,316936}, {code,4280811}, {ets,896140}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1258}, {memory_data,{4284698624,2802933760,{<10870.307.0>,1757252}}}, {disk_data, [{"C:\\",49423972,40},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1435824128}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1250317,0}}, {context_switches,{129109,0}}, {garbage_collection,{34853,171049025,0}}, {io,{{input,14677268},{output,10586313}}}, {reductions,{70772222,567616}}, {run_queue,0}, {runtime,{7425,47}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:32:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.4949494949494949]], [['ns_1@10.2.1.102'| 0.6598639455782314]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6638.0> 2011-01-03 13:32:09
===============================================================================
vbucketmigrator<0.6638.0>: Bucket 120 moved to the next server
vbucketmigrator<0.6638.0>: Validate bucket states
vbucketmigrator<0.6638.0>: 120 ok

INFO REPORT <11993.6656.0> 2011-01-03 13:32:10
===============================================================================
vbucketmigrator<0.6656.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6656.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6656.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6656.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6656.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6656.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6656.0>: Starting to move bucket 121
INFO REPORT <11993.6656.0> 2011-01-03 13:32:18
===============================================================================
vbucketmigrator<0.6656.0>: Bucket 121 moved to the next server
vbucketmigrator<0.6656.0>: Validate bucket states
vbucketmigrator<0.6656.0>: 121 ok

INFO REPORT <0.110.0> 2011-01-03 13:32:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.5151515151515151]], [['ns_1@10.2.1.102'| 0.6734693877551021]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6672.0> 2011-01-03 13:32:19
===============================================================================
vbucketmigrator<0.6672.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6672.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6672.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6672.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6672.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6672.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6672.0>: Starting to move bucket 122

INFO REPORT <11993.6672.0> 2011-01-03 13:32:26
===============================================================================
vbucketmigrator<0.6672.0>: Bucket 122 moved to the next server
vbucketmigrator<0.6672.0>: Validate bucket states
vbucketmigrator<0.6672.0>: 122 ok

INFO REPORT <0.85.0> 2011-01-03 13:32:27
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <11993.6695.0> 2011-01-03 13:32:27
===============================================================================
vbucketmigrator<0.6695.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6695.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6695.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6695.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6695.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6695.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6695.0>: Starting to move bucket 123

INFO REPORT <0.110.0> 2011-01-03 13:32:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.5252525252525253]], [['ns_1@10.2.1.102'| 0.6802721088435374]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.259.0> 2011-01-03 13:32:31
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default": auth_cmds 690 auth_errors 0 bucket_conns 21 bytes_read 5790368560 bytes_written 164004127 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 3830609 conn_yields 238 connection_structures 235 curr_connections 43 curr_items 2344131 curr_items_tot 4206629 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 4378 ep_commit_time 1 ep_commit_time_total 1349 ep_data_age 5 ep_data_age_highwat 495 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 2 ep_flush_duration_highwat 297 ep_flush_duration_total 1801 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 4434 ep_io_num_read 790849 ep_io_num_write 4204748 ep_io_read_bytes 1004360426 ep_io_write_bytes 4636443853 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 2373184987 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 645548 ep_num_eject_failures 1689334 ep_num_eject_replicas 871116 ep_num_expiry_pager_runs 0 ep_num_non_resident 1950134 ep_num_not_my_vbuckets 606926 ep_num_pager_runs 6 ep_num_value_ejects 1951599 ep_oom_errors 0 ep_overhead 35580666 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 1440 ep_storage_age 4 ep_storage_age_highwat 492 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 784593 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 5023560067 ep_total_del_items 0 ep_total_enqueued 4210769 ep_total_new_items 4200782 ep_total_persisted 4204748 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2408765653 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 687 tap_mutation_received 1576969 tap_mutation_sent 3535909 tap_opaque_received 1468 tap_opaque_sent 2100 tap_vbucket_set_sent 1367 threads 4 time 1294090351 total_connections 1354 uptime 2365 version 1.4.4_304_g7d5a132
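Comparing this dump with the 13:30:51 one gives a rough persistence rate during the rebalance: ep_total_persisted went from 4103390 to 4204748 while the time stat went from 1294090250 to 1294090351, i.e. about a thousand items persisted per second:

    %% Erlang shell arithmetic on the two stats snapshots above:
    (4204748 - 4103390) / (1294090351 - 1294090250).  %% ~1003.5 items/s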
INFO REPORT <11993.6716.0> 2011-01-03 13:32:35
===============================================================================
vbucketmigrator<0.6695.0>: Bucket 123 moved to the next server
vbucketmigrator<0.6695.0>: Validate bucket states
vbucketmigrator<0.6695.0>: 123 ok

INFO REPORT <11993.6716.0> 2011-01-03 13:32:36
===============================================================================
vbucketmigrator<0.6716.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6716.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6716.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6716.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6716.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6716.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6716.0>: Starting to move bucket 124

INFO REPORT <0.110.0> 2011-01-03 13:32:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.5353535353535354]], [['ns_1@10.2.1.102'| 0.6870748299319728]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6716.0> 2011-01-03 13:32:42
===============================================================================
vbucketmigrator<0.6716.0>: Bucket 124 moved to the next server
vbucketmigrator<0.6716.0>: Validate bucket states
vbucketmigrator<0.6716.0>: 124 ok

INFO REPORT <11993.6730.0> 2011-01-03 13:32:43
===============================================================================
vbucketmigrator<0.6730.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6730.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6730.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6730.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6730.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6730.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6730.0>: Starting to move bucket 125

INFO REPORT <0.110.0> 2011-01-03 13:32:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.5454545454545454]], [['ns_1@10.2.1.102'| 0.6938775510204082]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.85.0> 2011-01-03 13:32:50
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <11993.6730.0> 2011-01-03 13:32:50
===============================================================================
vbucketmigrator<0.6730.0>: Bucket 125 moved to the next server
vbucketmigrator<0.6730.0>: Validate bucket states
vbucketmigrator<0.6730.0>: 125 ok

INFO REPORT <11993.6751.0> 2011-01-03 13:32:51
===============================================================================
vbucketmigrator<0.6751.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6751.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6751.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6751.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6751.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6751.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6751.0>: Starting to move bucket 126

INFO REPORT <0.110.0> 2011-01-03 13:32:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.5555555555555556]], [['ns_1@10.2.1.102'| 0.7006802721088435]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6751.0> 2011-01-03 13:32:59
===============================================================================
vbucketmigrator<0.6751.0>: Bucket 126 moved to the next server
vbucketmigrator<0.6751.0>: Validate bucket states
vbucketmigrator<0.6751.0>: 126 ok

INFO REPORT <11993.6774.0> 2011-01-03 13:33:06
===============================================================================
vbucketmigrator<0.6774.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6774.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6774.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6774.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6774.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6774.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6774.0>: Starting to move bucket 127

INFO REPORT <0.93.0> 2011-01-03 13:33:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,90387,404401}}, {active_buckets,["default"]}, {memory, [{total,19233216}, {processes,11111900}, {processes_used,11094332}, {system,8121316}, {atom,560301}, {atom_used,557531}, {binary,216216}, {code,4570913}, {ets,1351988}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2399}, {memory_data,{4284698624,4195975168,{<0.299.0>,3328596}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,96161792}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2390394,0}}, {context_switches,{727719,0}}, {garbage_collection,{133143,1428441353,0}}, {io,{{input,65564998},{output,34635102}}}, {reductions,{597864349,1528739}}, {run_queue,0}, {runtime,{30794,31}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,90388,387403}}, {active_buckets,["default"]}, {memory, [{total,19160216}, {processes,10941556}, {processes_used,10932876}, {system,8218660}, {atom,559813}, {atom_used,556363}, {binary,344272}, {code,4551541}, {ets,1345988}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2330}, {memory_data,{4284698624,4211879936,{<11993.387.0>,4114268}}}, {disk_data, [{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,52957184}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2320894,0}}, {context_switches,{407937,0}}, {garbage_collection,{98059,883520369,0}}, {io,{{input,53923316},{output,26830593}}}, {reductions,{264624405,2468782}}, {run_queue,0}, {runtime,{19312,234}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,90388,153400}}, {active_buckets,["default"]}, {memory, [{total,14309672}, {processes,6824924}, {processes_used,6811500}, {system,7484748}, {atom,541077}, {atom_used,528744}, {binary,319312}, {code,4280811}, {ets,932036}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1318}, {memory_data,{4284698624,2859393024,{<10870.307.0>,2057352}}}, {disk_data, [{"C:\\",49423972,40},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1374195712}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1310315,0}}, {context_switches,{134937,0}}, {garbage_collection,{36683,179844566,0}}, {io,{{input,15680255},{output,11581045}}}, {reductions,{74299405,588282}}, {run_queue,0}, {runtime,{7534,15}}]}]}]
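The ns_doctor snapshot above also shows how skewed free memory is across the cluster during the rebalance; per its system_memory_data sections (values copied from the 13:33:08 report):

    96161792 / 4284698624.    %% ~0.022 free on ns_1@10.2.1.100
    52957184 / 4284698624.    %% ~0.012 free on ns_1@10.2.1.101
    1374195712 / 4284698624.  %% ~0.321 free on ns_1@10.2.1.102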
INFO REPORT <0.110.0> 2011-01-03 13:33:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.5656565656565656]], [['ns_1@10.2.1.102'| 0.7074829931972789]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6774.0> 2011-01-03 13:33:13
===============================================================================
vbucketmigrator<0.6774.0>: Bucket 127 moved to the next server
vbucketmigrator<0.6774.0>: Validate bucket states
vbucketmigrator<0.6774.0>: 127 ok

INFO REPORT <11993.6800.0> 2011-01-03 13:33:14
===============================================================================
vbucketmigrator<0.6800.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6800.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6800.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6800.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6800.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6800.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6800.0>: Starting to move bucket 128

INFO REPORT <0.110.0> 2011-01-03 13:33:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.5757575757575757]], [['ns_1@10.2.1.102'| 0.7142857142857143]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6800.0> 2011-01-03 13:33:21
===============================================================================
vbucketmigrator<0.6800.0>: Bucket 128 moved to the next server
vbucketmigrator<0.6800.0>: Validate bucket states
vbucketmigrator<0.6800.0>: 128 ok

INFO REPORT <11993.6821.0> 2011-01-03 13:33:22
===============================================================================
vbucketmigrator<0.6821.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6821.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6821.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6821.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6821.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6821.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6821.0>: Starting to move bucket 129

INFO REPORT <0.85.0> 2011-01-03 13:33:28
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:33:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.5858585858585859]], [['ns_1@10.2.1.102'| 0.7210884353741497]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6821.0> 2011-01-03 13:33:32
===============================================================================
vbucketmigrator<0.6821.0>: Bucket 129 moved to the next server
vbucketmigrator<0.6821.0>: Validate bucket states
vbucketmigrator<0.6821.0>: 129 ok

INFO REPORT <11993.6848.0> 2011-01-03 13:33:33
===============================================================================
vbucketmigrator<0.6848.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6848.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6848.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6848.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6848.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6848.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6848.0>: Starting to move bucket 130

INFO REPORT <0.110.0> 2011-01-03 13:33:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.595959595959596]], [['ns_1@10.2.1.102'| 0.727891156462585]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6848.0> 2011-01-03 13:33:41
===============================================================================
vbucketmigrator<0.6848.0>: Bucket 130 moved to the next server
vbucketmigrator<0.6848.0>: Validate bucket states
vbucketmigrator<0.6848.0>: 130 ok

INFO REPORT <11993.6868.0> 2011-01-03 13:33:42
===============================================================================
vbucketmigrator<0.6868.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6868.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6868.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6868.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6868.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6868.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6868.0>: Starting to move bucket 131

INFO REPORT <0.110.0> 2011-01-03 13:33:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.6060606060606061]], [['ns_1@10.2.1.102'| 0.7346938775510203]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6868.0> 2011-01-03 13:33:51
===============================================================================
vbucketmigrator<0.6868.0>: Bucket 131 moved to the next server
vbucketmigrator<0.6868.0>: Validate bucket states
vbucketmigrator<0.6868.0>: 131 ok

INFO REPORT <11993.6889.0> 2011-01-03 13:33:52
===============================================================================
vbucketmigrator<0.6889.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6889.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6889.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6889.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6889.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6889.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6889.0>: Starting to move bucket 132
INFO REPORT <0.85.0> 2011-01-03 13:33:52
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.110.0> 2011-01-03 13:33:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.6161616161616161]], [['ns_1@10.2.1.102'| 0.7414965986394557]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6889.0> 2011-01-03 13:34:01
===============================================================================
vbucketmigrator<0.6889.0>: Bucket 132 moved to the next server
vbucketmigrator<0.6889.0>: Validate bucket states
vbucketmigrator<0.6889.0>: 132 ok

INFO REPORT <11993.6912.0> 2011-01-03 13:34:02
===============================================================================
vbucketmigrator<0.6912.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.6912.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6912.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.6912.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.6912.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6912.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.6912.0>: Starting to move bucket 133

INFO REPORT <0.93.0> 2011-01-03 13:34:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,90447,402401}}, {active_buckets,["default"]}, {memory, [{total,24719912}, {processes,16608276}, {processes_used,16590660}, {system,8111636}, {atom,560301}, {atom_used,557531}, {binary,207928}, {code,4570913}, {ets,1350308}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2459}, {memory_data,{4284698624,4191133696,{<0.299.0>,1457152}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,91693056}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2450392,0}}, {context_switches,{743546,0}}, {garbage_collection,{135760,1469208879,0}}, {io,{{input,68775125},{output,35230213}}}, {reductions,{609024020,1145917}}, {run_queue,0}, {runtime,{31574,47}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,90448,385401}}, {active_buckets,["default"]}, {memory, [{total,18776080}, {processes,10541580}, {processes_used,10532900}, {system,8234500}, {atom,559813}, {atom_used,556363}, {binary,361472}, {code,4551541}, {ets,1344556}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2390}, {memory_data,{4284698624,4249706496,{<11993.387.0>,3814168}}}, {disk_data, [{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,43122688}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2380892,0}}, {context_switches,{419448,0}}, {garbage_collection,{100235,908940432,0}}, {io,{{input,55214973},{output,27263025}}}, {reductions,{273257638,2514409}}, {run_queue,0}, {runtime,{19796,203}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,90448,151400}}, {active_buckets,["default"]}, {memory, [{total,14383576}, {processes,6894876}, {processes_used,6881452}, {system,7488700}, {atom,541077}, {atom_used,528744}, {binary,324240}, {code,4280811}, {ets,930780}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1378}, {memory_data,{4284698624,2917756928,{<10870.307.0>,1457152}}}, {disk_data, [{"C:\\",49423972,40},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1326698496}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1370313,0}}, {context_switches,{140252,0}}, {garbage_collection,{38400,188854481,0}}, {io,{{input,15931846},{output,11855793}}}, {reductions,{77822647,572519}}, {run_queue,0}, {runtime,{7706,16}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:34:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.6262626262626263]], [['ns_1@10.2.1.102'| 0.7482993197278911]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.6912.0> 2011-01-03 13:34:11
===============================================================================
vbucketmigrator<0.6912.0>: Bucket 133 moved to the next server
vbucketmigrator<0.6912.0>: Validate bucket states
vbucketmigrator<0.6912.0>: 133 ok
ep_storage_age_highwat 492 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 784593 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 5040327806 ep_total_del_items 0 ep_total_enqueued 4354577 ep_total_new_items 4345597 ep_total_persisted 4350022 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2425458750 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 687 tap_mutation_received 1576969 tap_mutation_sent 3535909 tap_opaque_received 1468 tap_opaque_sent 2100 tap_vbucket_set_sent 1367 threads 4 time 1294090451 total_connections 1354 uptime 2465 version 1.4.4_304_g7d5a132 INFO REPORT <11993.6949.0> 2011-01-03 13:34:12 =============================================================================== vbucketmigrator<0.6949.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.6949.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6949.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6949.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.6949.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.6949.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.6949.0>: Starting to move bucket 134 INFO REPORT <0.85.0> 2011-01-03 13:34:17 =============================================================================== Pulling config from: 'ns_1@10.2.1.102' INFO REPORT <0.110.0> 2011-01-03 13:34:18 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.6363636363636364]], [['ns_1@10.2.1.102'| 0.7551020408163265]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <11993.6949.0> 2011-01-03 13:34:19 =============================================================================== vbucketmigrator<0.6949.0>: Bucket 134 moved to the next server vbucketmigrator<0.6949.0>: Validate bucket states vbucketmigrator<0.6949.0>: 134 ok INFO REPORT <11993.6969.0> 2011-01-03 13:34:20 =============================================================================== vbucketmigrator<0.6969.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.6969.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6969.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6969.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.6969.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.6969.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.6969.0>: Starting to move bucket 135 INFO REPORT <0.110.0> 2011-01-03 13:34:28 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 
1.0]], [['ns_1@10.2.1.101'| 0.6464646464646464]], [['ns_1@10.2.1.102'| 0.7619047619047619]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <11993.6969.0> 2011-01-03 13:34:29 =============================================================================== vbucketmigrator<0.6969.0>: Bucket 135 moved to the next server vbucketmigrator<0.6969.0>: Validate bucket states vbucketmigrator<0.6969.0>: 135 ok INFO REPORT <11993.6996.0> 2011-01-03 13:34:30 =============================================================================== vbucketmigrator<0.6996.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.6996.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6996.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.6996.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.6996.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.6996.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.6996.0>: Starting to move bucket 136 INFO REPORT <11993.6996.0> 2011-01-03 13:34:38 =============================================================================== vbucketmigrator<0.6996.0>: Bucket 136 moved to the next server vbucketmigrator<0.6996.0>: Validate bucket states vbucketmigrator<0.6996.0>: 136 ok INFO REPORT <0.110.0> 2011-01-03 13:34:38 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.6666666666666667]], [['ns_1@10.2.1.102'| 0.7755102040816326]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <11993.7014.0> 2011-01-03 13:34:39 =============================================================================== vbucketmigrator<0.7014.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7014.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7014.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7014.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.7014.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.7014.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.7014.0>: Starting to move bucket 137 INFO REPORT <11993.7014.0> 2011-01-03 13:34:46 =============================================================================== vbucketmigrator<0.7014.0>: Bucket 137 moved to the next server vbucketmigrator<0.7014.0>: Validate bucket states vbucketmigrator<0.7014.0>: 137 ok INFO REPORT <11993.7037.0> 2011-01-03 13:34:47 =============================================================================== vbucketmigrator<0.7037.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7037.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7037.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7037.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.7037.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.7037.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.7037.0>: Starting to move bucket 138 INFO REPORT <0.110.0> 2011-01-03 13:34:48 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, 
{[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.6767676767676767]], [['ns_1@10.2.1.102'| 0.782312925170068]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <11993.7037.0> 2011-01-03 13:34:56 =============================================================================== vbucketmigrator<0.7037.0>: Bucket 138 moved to the next server vbucketmigrator<0.7037.0>: Validate bucket states vbucketmigrator<0.7037.0>: 138 ok INFO REPORT <11993.7057.0> 2011-01-03 13:34:57 =============================================================================== vbucketmigrator<0.7057.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7057.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7057.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7057.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.7057.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.7057.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.7057.0>: Starting to move bucket 139 INFO REPORT <0.110.0> 2011-01-03 13:34:58 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.6868686868686869]], [['ns_1@10.2.1.102'| 0.7891156462585034]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <11993.7057.0> 2011-01-03 13:35:05 =============================================================================== vbucketmigrator<0.7057.0>: Bucket 139 moved to the next server vbucketmigrator<0.7057.0>: Validate bucket states vbucketmigrator<0.7057.0>: 139 ok INFO REPORT <11993.7080.0> 2011-01-03 13:35:06 =============================================================================== vbucketmigrator<0.7080.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7080.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7080.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7080.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.7080.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.7080.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.7080.0>: Starting to move bucket 140 INFO REPORT <0.93.0> 2011-01-03 13:35:08 =============================================================================== ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,90507,400401}}, {active_buckets,["default"]}, {memory, [{total,20110872}, {processes,11949708}, {processes_used,11932140}, {system,8161164}, {atom,560301}, {atom_used,557531}, {binary,223408}, {code,4570913}, {ets,1384316}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2519}, {memory_data,{4284698624,4194017280,{<0.299.0>,2542924}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,87900160}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2510390,0}}, {context_switches,{760557,0}}, 
{garbage_collection,{138399,1511236307,0}}, {io,{{input,71301536},{output,35826165}}}, {reductions,{621532464,1273922}}, {run_queue,0}, {runtime,{32651,125}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,90508,399401}}, {active_buckets,["default"]}, {memory, [{total,21909056}, {processes,13652996}, {processes_used,13644316}, {system,8256060}, {atom,559813}, {atom_used,556363}, {binary,347408}, {code,4551541}, {ets,1379940}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2450}, {memory_data,{4284698624,4251156480,{<11993.387.0>,3814168}}}, {disk_data, [{"C:\\",46243100,45},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,42967040}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2440890,0}}, {context_switches,{431470,0}}, {garbage_collection,{102882,934727919,0}}, {io,{{input,57307977},{output,28409026}}}, {reductions,{281989428,2490649}}, {run_queue,0}, {runtime,{20202,141}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,90508,149400}}, {active_buckets,["default"]}, {memory, [{total,13785088}, {processes,6261836}, {processes_used,6248412}, {system,7523252}, {atom,541077}, {atom_used,528744}, {binary,324808}, {code,4280811}, {ets,964740}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1438}, {memory_data,{4284698624,2966106112,{<10870.307.0>,1757252}}}, {disk_data, [{"C:\\",49423972,40},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1278640128}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1430326,0}}, {context_switches,{145717,0}}, {garbage_collection,{40253,198054455,0}}, {io,{{input,16173204},{output,12125213}}}, {reductions,{81374664,602577}}, {run_queue,0}, {runtime,{7831,31}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:35:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.696969696969697]], [['ns_1@10.2.1.102'| 0.7959183673469388]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7080.0> 2011-01-03 13:35:12
===============================================================================
vbucketmigrator<0.7080.0>: Bucket 140 moved to the next server
vbucketmigrator<0.7080.0>: Validate bucket states
vbucketmigrator<0.7080.0>: 140 ok

INFO REPORT <11993.7099.0> 2011-01-03 13:35:13
===============================================================================
vbucketmigrator<0.7099.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7099.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7099.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7099.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7099.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7099.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7099.0>: Starting to move bucket 141

INFO REPORT <0.85.0> 2011-01-03 13:35:14
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.110.0> 2011-01-03 13:35:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.7070707070707071]], [['ns_1@10.2.1.102'| 0.8027210884353742]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7099.0> 2011-01-03 13:35:20
===============================================================================
vbucketmigrator<0.7099.0>: Bucket 141 moved to the next server
vbucketmigrator<0.7099.0>: Validate bucket states
vbucketmigrator<0.7099.0>: 141 ok

INFO REPORT <11993.7117.0> 2011-01-03 13:35:21
===============================================================================
vbucketmigrator<0.7117.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7117.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7117.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7117.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7117.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7117.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7117.0>: Starting to move bucket 142

INFO REPORT <0.85.0> 2011-01-03 13:35:23
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:35:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.7171717171717171]], [['ns_1@10.2.1.102'| 0.8095238095238095]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7117.0> 2011-01-03 13:35:32
===============================================================================
vbucketmigrator<0.7117.0>: Bucket 142 moved to the next server
vbucketmigrator<0.7117.0>: Validate bucket states
vbucketmigrator<0.7117.0>: 142 ok

INFO REPORT <11993.7143.0> 2011-01-03 13:35:33
===============================================================================
vbucketmigrator<0.7143.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7143.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7143.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7143.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7143.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7143.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7143.0>: Starting to move bucket 143

INFO REPORT <0.85.0> 2011-01-03 13:35:34
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:35:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.7272727272727273]], [['ns_1@10.2.1.102'| 0.8163265306122449]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7143.0> 2011-01-03 13:35:40
===============================================================================
vbucketmigrator<0.7143.0>: Bucket 143 moved to the next server
vbucketmigrator<0.7143.0>: Validate bucket states
vbucketmigrator<0.7143.0>: 143 ok

INFO REPORT <11993.7164.0> 2011-01-03 13:35:41
===============================================================================
vbucketmigrator<0.7164.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7164.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7164.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7164.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7164.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7164.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7164.0>: Starting to move bucket 144

INFO REPORT <0.110.0> 2011-01-03 13:35:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.7373737373737373]], [['ns_1@10.2.1.102'| 0.8231292517006803]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.85.0> 2011-01-03 13:35:48
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <11993.7164.0> 2011-01-03 13:35:50
===============================================================================
vbucketmigrator<0.7164.0>: Bucket 144 moved to the next server
vbucketmigrator<0.7164.0>: Validate bucket states
vbucketmigrator<0.7164.0>: 144 ok

INFO REPORT <0.259.0> 2011-01-03 13:35:51
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default": auth_cmds 690 auth_errors 0 bucket_conns 21 bytes_read 5815478987 bytes_written 181245636 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 4259859 conn_yields 238 connection_structures 235 curr_connections 43 curr_items 2635988 curr_items_tot 4498486 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 4848 ep_commit_time 0 ep_commit_time_total 1454 ep_data_age 5 ep_data_age_highwat 495 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 2 ep_flush_duration_highwat 297 ep_flush_duration_total 1932 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 2700 ep_io_num_read 790849 ep_io_num_write 4498071 ep_io_read_bytes 1004360426 ep_io_write_bytes 4644212028 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 2407307874 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 645317 ep_num_eject_failures 1689334 ep_num_eject_replicas 871116 ep_num_expiry_pager_runs 0 ep_num_non_resident 1949903 ep_num_not_my_vbuckets 743346 ep_num_pager_runs 6 ep_num_value_ejects 1951599 ep_oom_errors 0 ep_overhead 35556607 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 2677 ep_storage_age 4 ep_storage_age_highwat 492 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 784593 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 5057682954 ep_total_del_items 0 ep_total_enqueued 4503744 ep_total_new_items 4493123 ep_total_persisted 4498071 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2442864481 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 687 tap_mutation_received 1576969 tap_mutation_sent 3535909 tap_opaque_received 1468 tap_opaque_sent 2100 tap_vbucket_set_sent 1367 threads 4 time 1294090551 total_connections 1354 uptime 2565 version 1.4.4_304_g7d5a132

INFO REPORT <11993.7184.0> 2011-01-03 13:35:51
===============================================================================
vbucketmigrator<0.7184.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7184.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7184.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7184.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7184.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7184.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7184.0>: Starting to move bucket 145

INFO REPORT <0.85.0> 2011-01-03 13:35:57
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:35:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.7474747474747474]], [['ns_1@10.2.1.102'| 0.8299319727891157]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7184.0> 2011-01-03 13:35:59
===============================================================================
vbucketmigrator<0.7184.0>: Bucket 145 moved to the next server
vbucketmigrator<0.7184.0>: Validate bucket states
vbucketmigrator<0.7184.0>: 145 ok

INFO REPORT <11993.7207.0> 2011-01-03 13:36:01
===============================================================================
vbucketmigrator<0.7207.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7207.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7207.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7207.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7207.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7207.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7207.0>: Starting to move bucket 146

INFO REPORT <0.93.0> 2011-01-03 13:36:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100',
[{last_heard,{1294,90567,414401}}, {active_buckets,["default"]}, {memory, [{total,19831560}, {processes,11664556}, {processes_used,11646436}, {system,8167004}, {atom,560301}, {atom_used,557531}, {binary,194560}, {code,4570913}, {ets,1419772}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2579}, {memory_data,{4284698624,4198019072,{<0.299.0>,2357452}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,76664832}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2570403,0}}, {context_switches,{776347,0}}, {garbage_collection,{140946,1548774188,0}}, {io,{{input,74595924},{output,37809541}}}, {reductions,{632309333,1026734}}, {run_queue,0}, {runtime,{33789,62}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,90568,397401}}, {active_buckets,["default"]}, {memory, [{total,20717224}, {processes,12449772}, {processes_used,12441500}, {system,8267452}, {atom,559813}, {atom_used,556363}, {binary,359752}, {code,4551541}, {ets,1378804}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2510}, {memory_data,{4284698624,4251791360,{<11993.387.0>,6656756}}}, {disk_data, [{"C:\\",46243100,46},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,33972224}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2500919,31}}, {context_switches,{442921,0}}, {garbage_collection,{105018,958327777,0}}, {io,{{input,58650578},{output,28844371}}}, {reductions,{290712192,2634686}}, {run_queue,0}, {runtime,{20732,187}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,90568,163400}}, {active_buckets,["default"]}, {memory, [{total,13966808}, {processes,6410940}, {processes_used,6397516}, {system,7555868}, {atom,541077}, {atom_used,528744}, {binary,320496}, {code,4280811}, {ets,1001852}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1498}, {memory_data,{4284698624,3014004736,{<10870.218.0>,1271780}}}, {disk_data, [{"C:\\",49423972,40},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1221173248}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1490324,0}}, {context_switches,{151386,0}}, {garbage_collection,{42066,207195116,0}}, {io,{{input,17173629},{output,13241789}}}, {reductions,{85019752,574138}}, {run_queue,0}, {runtime,{8002,15}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:36:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.7575757575757576]], [['ns_1@10.2.1.102'| 0.8367346938775511]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7207.0> 2011-01-03 13:36:09
===============================================================================
vbucketmigrator<0.7207.0>: Bucket 146 moved to the next server
vbucketmigrator<0.7207.0>: Validate bucket states
vbucketmigrator<0.7207.0>: 146 ok

INFO REPORT <11993.7228.0> 2011-01-03 13:36:10
===============================================================================
vbucketmigrator<0.7228.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7228.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7228.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7228.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7228.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7228.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7228.0>: Starting to move bucket 147

INFO REPORT <11993.7228.0> 2011-01-03 13:36:18
===============================================================================
vbucketmigrator<0.7228.0>: Bucket 147 moved to the next server
vbucketmigrator<0.7228.0>: Validate bucket states
vbucketmigrator<0.7228.0>: 147 ok

INFO REPORT <0.110.0> 2011-01-03 13:36:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.7777777777777778]], [['ns_1@10.2.1.102'| 0.8503401360544218]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7244.0> 2011-01-03 13:36:19
===============================================================================
vbucketmigrator<0.7244.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7244.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7244.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7244.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7244.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7244.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7244.0>: Starting to move bucket 148

INFO REPORT <11993.7244.0> 2011-01-03 13:36:27
===============================================================================
vbucketmigrator<0.7244.0>: Bucket 148 moved to the next server
vbucketmigrator<0.7244.0>: Validate bucket states
vbucketmigrator<0.7244.0>: 148 ok

INFO REPORT <0.110.0> 2011-01-03 13:36:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.7878787878787878]], [['ns_1@10.2.1.102'| 0.8571428571428572]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7267.0> 2011-01-03 13:36:28
===============================================================================
vbucketmigrator<0.7267.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7267.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7267.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7267.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7267.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7267.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7267.0>: Starting to move bucket 149

INFO REPORT <0.85.0> 2011-01-03 13:36:29
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.9251.0> 2011-01-03 13:36:30
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.9242.0> 2011-01-03 13:36:32
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <11993.7267.0> 2011-01-03 13:36:38
===============================================================================
vbucketmigrator<0.7267.0>: Bucket 149 moved to the next server
vbucketmigrator<0.7267.0>: Validate bucket states
vbucketmigrator<0.7267.0>: 149 ok

INFO REPORT <0.110.0> 2011-01-03 13:36:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.797979797979798]], [['ns_1@10.2.1.102'| 0.8639455782312925]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7295.0> 2011-01-03 13:36:39
===============================================================================
vbucketmigrator<0.7295.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7295.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7295.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7295.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7295.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7295.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7295.0>: Starting to move bucket 150

INFO REPORT <11993.7295.0> 2011-01-03 13:36:46
===============================================================================
vbucketmigrator<0.7295.0>: Bucket 150 moved to the next server
vbucketmigrator<0.7295.0>: Validate bucket states
vbucketmigrator<0.7295.0>: 150 ok

INFO REPORT <11993.7314.0> 2011-01-03 13:36:48
===============================================================================
vbucketmigrator<0.7314.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7314.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7314.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7314.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7314.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7314.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7314.0>: Starting to move bucket 151

INFO REPORT <0.110.0> 2011-01-03 13:36:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.8080808080808081]], [['ns_1@10.2.1.102'| 0.8707482993197279]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.85.0> 2011-01-03 13:36:52
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <11993.7314.0> 2011-01-03 13:36:55
===============================================================================
vbucketmigrator<0.7314.0>: Bucket 151 moved to the next server
vbucketmigrator<0.7314.0>: Validate bucket states
vbucketmigrator<0.7314.0>: 151 ok

INFO REPORT <11993.7331.0> 2011-01-03 13:36:56
===============================================================================
vbucketmigrator<0.7331.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7331.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7331.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7331.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7331.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7331.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7331.0>: Starting to move bucket 152

INFO REPORT <0.110.0> 2011-01-03 13:36:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.8181818181818181]], [['ns_1@10.2.1.102'| 0.8775510204081632]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7331.0> 2011-01-03 13:37:04
===============================================================================
vbucketmigrator<0.7331.0>: Bucket 152 moved to the next server
vbucketmigrator<0.7331.0>: Validate bucket states
vbucketmigrator<0.7331.0>: 152 ok

INFO REPORT <11993.7359.0> 2011-01-03 13:37:05
===============================================================================
vbucketmigrator<0.7359.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7359.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7359.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7359.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7359.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7359.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7359.0>: Starting to move bucket 153

INFO REPORT <0.93.0> 2011-01-03 13:37:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,90627,412401}}, {active_buckets,["default"]}, {memory, [{total,32027848}, {processes,23802580}, {processes_used,23786132}, {system,8225268}, {atom,560301}, {atom_used,557531}, {binary,217448}, {code,4570913}, {ets,1453844}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2639}, {memory_data,{4284698624,4209217536,{<0.299.0>,3328596}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,72941568}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2630401,0}}, {context_switches,{792331,0}}, {garbage_collection,{143539,1587677172,0}}, {io,{{input,77556280},{output,38435951}}}, {reductions,{643688067,1451112}}, {run_queue,0}, {runtime,{35225,219}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,90627,412402}}, {active_buckets,["default"]}, {memory, [{total,19345448}, {processes,11062052},
{processes_used,11053372}, {system,8283396}, {atom,559813}, {atom_used,556363}, {binary,342616}, {code,4551541}, {ets,1411660}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2569}, {memory_data,{4284698624,4227813376,{<11993.387.0>,5385512}}}, {disk_data, [{"C:\\",46243100,46},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,55283712}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2560902,0}}, {context_switches,{453730,0}}, {garbage_collection,{107170,982689724,0}}, {io,{{input,60050811},{output,29255089}}}, {reductions,{298118677,1285626}}, {run_queue,0}, {runtime,{21512,140}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,90628,161401}}, {active_buckets,["default"]}, {memory, [{total,14932424}, {processes,7339740}, {processes_used,7326316}, {system,7592684}, {atom,541077}, {atom_used,528744}, {binary,323256}, {code,4280811}, {ets,1035876}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1558}, {memory_data,{4284698624,3072765952,{<10870.307.0>,2057352}}}, {disk_data, [{"C:\\",49423972,41},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1177276416}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1550322,0}}, {context_switches,{156768,0}}, {garbage_collection,{43792,216306527,0}}, {io,{{input,17432718},{output,13510439}}}, {reductions,{88588789,608700}}, {run_queue,0}, {runtime,{8143,31}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:37:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.8282828282828283]], [['ns_1@10.2.1.102'| 0.8843537414965986]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7359.0> 2011-01-03 13:37:17
===============================================================================
vbucketmigrator<0.7359.0>: Bucket 153 moved to the next server
vbucketmigrator<0.7359.0>: Validate bucket states
vbucketmigrator<0.7359.0>: 153 ok

INFO REPORT <0.110.0> 2011-01-03 13:37:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.8383838383838383]], [['ns_1@10.2.1.102'| 0.891156462585034]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7408.0> 2011-01-03 13:37:19
===============================================================================
vbucketmigrator<0.7408.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7408.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7408.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7408.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7408.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7408.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7408.0>: Starting to move bucket 154

INFO REPORT <0.110.0> 2011-01-03 13:37:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.8383838383838383]], [['ns_1@10.2.1.102'| 0.891156462585034]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7408.0> 2011-01-03 13:37:31
===============================================================================
vbucketmigrator<0.7408.0>: Bucket 154 moved to the next server
vbucketmigrator<0.7408.0>: Validate bucket states
vbucketmigrator<0.7408.0>: 154 ok

INFO REPORT <0.259.0> 2011-01-03 13:37:31
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default": auth_cmds 690 auth_errors 0 bucket_conns 51 bytes_read 5828103474 bytes_written 190064052 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 4475668 conn_yields 238 connection_structures 235 curr_connections 73 curr_items 2778356 curr_items_tot 4640854 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 5041 ep_commit_time 0 ep_commit_time_total 1500 ep_data_age 8 ep_data_age_highwat 495 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0 ep_flush_duration 8 ep_flush_duration_highwat 297 ep_flush_duration_total 1985 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 6222 ep_io_num_read 790849 ep_io_num_write 4627621 ep_io_read_bytes 1004360426 ep_io_write_bytes 4647642648 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_kv_size 2423949450 ep_max_data_size 3426746368 ep_max_txn_size 1000 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0 ep_num_active_non_resident 645181 ep_num_eject_failures 1689334 ep_num_eject_replicas 871116 ep_num_expiry_pager_runs 0 ep_num_non_resident 1949767 ep_num_not_my_vbuckets 816243 ep_num_pager_runs 6 ep_num_value_ejects 1951599 ep_oom_errors 0 ep_overhead 36204418 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 12516 ep_storage_age 8 ep_storage_age_highwat 492 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 784593 ep_tap_keepalive 0 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 5074324530 ep_total_del_items 0 ep_total_enqueued 4646709 ep_total_new_items 4622190 ep_total_persisted 4627621 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2460153868 pid 2160 pointer_size 64 rejected_conns 0 tap_connect_received 687 tap_mutation_received 1576969 tap_mutation_sent 3535909 tap_opaque_received 1468 tap_opaque_sent 2100 tap_vbucket_set_sent 1367 threads 4 time 1294090651 total_connections 1404 uptime 2665 version 1.4.4_304_g7d5a132

INFO REPORT <11993.7442.0> 2011-01-03 13:37:33
===============================================================================
vbucketmigrator<0.7442.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7442.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7442.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7442.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7442.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7442.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7442.0>: Starting to move bucket 155

INFO REPORT <0.110.0> 2011-01-03 13:37:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.8484848484848485]], [['ns_1@10.2.1.102'| 0.8979591836734694]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.85.0> 2011-01-03 13:37:40
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <11993.7442.0> 2011-01-03 13:37:43
===============================================================================
vbucketmigrator<0.7442.0>: Bucket 155 moved to the next server
vbucketmigrator<0.7442.0>: Validate bucket states
vbucketmigrator<0.7442.0>: 155 ok

INFO REPORT <11993.7467.0> 2011-01-03 13:37:45
===============================================================================
vbucketmigrator<0.7467.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7467.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7467.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7467.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7467.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7467.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7467.0>: Starting to move bucket 156

INFO REPORT <0.10280.0> 2011-01-03 13:37:48
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.110.0> 2011-01-03 13:37:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.8585858585858586]], [['ns_1@10.2.1.102'| 0.9047619047619048]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <0.10269.0> 2011-01-03 13:37:48
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.10245.0> 2011-01-03 13:37:49
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.10235.0> 2011-01-03 13:37:50
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.10205.0> 2011-01-03 13:37:51
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <11993.7467.0> 2011-01-03 13:37:56
===============================================================================
vbucketmigrator<0.7467.0>: Bucket 156 moved to the next server
vbucketmigrator<0.7467.0>: Validate bucket states
vbucketmigrator<0.7467.0>: 156 ok

INFO REPORT <11993.7500.0> 2011-01-03 13:37:57
===============================================================================
vbucketmigrator<0.7500.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7500.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7500.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7500.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7500.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7500.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7500.0>: Starting to move bucket 157

INFO REPORT <0.110.0> 2011-01-03 13:37:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.8686868686868687]], [['ns_1@10.2.1.102'| 0.9115646258503401]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}}

INFO REPORT <11993.7500.0> 2011-01-03 13:38:07
===============================================================================
vbucketmigrator<0.7500.0>: Bucket 157 moved to the next server
vbucketmigrator<0.7500.0>: Validate bucket states
vbucketmigrator<0.7500.0>: 157 ok

INFO REPORT <0.93.0> 2011-01-03 13:38:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,90687,410401}}, {active_buckets,["default"]}, {memory, [{total,28796400}, {processes,20608108}, {processes_used,20588804}, {system,8188292}, {atom,560301}, {atom_used,557531}, {binary,184152}, {code,4570913}, {ets,1452116}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2699}, {memory_data,{4284698624,4217647104,{<0.10089.0>,6731832}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,53719040}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2690399,0}}, {context_switches,{810669,0}}, {garbage_collection,{146530,1630903313,0}}, {io,{{input,79897346},{output,39161522}}}, {reductions,{656412781,1093950}}, {run_queue,0}, {runtime,{36613,172}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,90687,410402}}, {active_buckets,["default"]}, {memory, [{total,20036968}, {processes,11681004}, {processes_used,11672324}, {system,8355964}, {atom,559813}, {atom_used,556363}, {binary,379776}, {code,4551541}, {ets,1447156}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2629},
{memory_data,{4284698624,4252856320,{<11993.387.0>,4599840}}}, {disk_data, [{"C:\\",46243100,46},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,31432704}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2620900,0}}, {context_switches,{466029,0}}, {garbage_collection,{109470,1008339802,0}}, {io,{{input,61681382},{output,31148497}}}, {reductions,{307024563,875702}}, {run_queue,0}, {runtime,{22713,125}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,90688,174400}}, {active_buckets,["default"]}, {memory, [{total,15034616}, {processes,7445156}, {processes_used,7431732}, {system,7589460}, {atom,541077}, {atom_used,528744}, {binary,321144}, {code,4280811}, {ets,1034508}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1618}, {memory_data,{4284698624,3119173632,{<10870.307.0>,2057352}}}, {disk_data, [{"C:\\",49423972,41},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",1.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1126793216}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1610320,0}}, {context_switches,{162460,0}}, {garbage_collection,{45520,225361756,0}}, {io,{{input,17679000},{output,13783534}}}, {reductions,{92154806,579768}}, {run_queue,0}, {runtime,{8190,16}}]}]}] INFO REPORT <0.110.0> 2011-01-03 13:38:08 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.8787878787878788]], [['ns_1@10.2.1.102'| 0.9183673469387755]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <11993.7522.0> 2011-01-03 13:38:09 =============================================================================== vbucketmigrator<0.7522.0>: Connecting to {Sock 10.2.1.102:11210} vbucketmigrator<0.7522.0>: Authenticating towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7522.0>: Authenticated towards: {Sock 10.2.1.102:11210} vbucketmigrator<0.7522.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.7522.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.7522.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.7522.0>: Starting to move bucket 158 INFO REPORT <0.110.0> 2011-01-03 13:38:18 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.8123.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 1.0]], [['ns_1@10.2.1.101'| 0.8787878787878788]], [['ns_1@10.2.1.102'| 0.9183673469387755]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <11993.7522.0> 2011-01-03 13:38:18 =============================================================================== vbucketmigrator<0.7522.0>: Bucket 158 moved to the next server vbucketmigrator<0.7522.0>: Validate bucket states vbucketmigrator<0.7522.0>: 158 ok INFO REPORT <11993.7553.0> 2011-01-03 13:38:20 =============================================================================== vbucketmigrator<0.7553.0>: 
INFO REPORT <11993.7553.0> 2011-01-03 13:38:20
===============================================================================
vbucketmigrator<0.7553.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7553.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7553.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7553.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7553.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7553.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7553.0>: Starting to move bucket 159

INFO REPORT <0.110.0> 2011-01-03 13:38:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.8123.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|1.0]],
    [['ns_1@10.2.1.101'|0.8888888888888888]],
    [['ns_1@10.2.1.102'|0.9251700680272109]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <11993.7553.0> 2011-01-03 13:38:34
===============================================================================
vbucketmigrator<0.7553.0>: Bucket 159 moved to the next server
vbucketmigrator<0.7553.0>: Validate bucket states
vbucketmigrator<0.7553.0>: 159 ok

INFO REPORT <11993.7586.0> 2011-01-03 13:38:35
===============================================================================
vbucketmigrator<0.7586.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7586.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7586.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7586.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7586.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7586.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7586.0>: Starting to move bucket 160

INFO REPORT <0.85.0> 2011-01-03 13:38:35
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.110.0> 2011-01-03 13:38:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.8123.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|1.0]],
    [['ns_1@10.2.1.101'|0.898989898989899]],
    [['ns_1@10.2.1.102'|0.9319727891156463]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <11993.7586.0> 2011-01-03 13:38:47
===============================================================================
vbucketmigrator<0.7586.0>: Bucket 160 moved to the next server
vbucketmigrator<0.7586.0>: Validate bucket states
vbucketmigrator<0.7586.0>: 160 ok

INFO REPORT <0.110.0> 2011-01-03 13:38:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.8123.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|1.0]],
    [['ns_1@10.2.1.101'|0.9090909090909091]],
    [['ns_1@10.2.1.102'|0.9387755102040817]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <11993.7614.0> 2011-01-03 13:38:48
===============================================================================
vbucketmigrator<0.7614.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7614.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7614.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7614.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7614.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7614.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7614.0>: Starting to move bucket 161

INFO REPORT <0.110.0> 2011-01-03 13:38:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.8123.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|1.0]],
    [['ns_1@10.2.1.101'|0.9090909090909091]],
    [['ns_1@10.2.1.102'|0.9387755102040817]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <11993.7614.0> 2011-01-03 13:38:59
===============================================================================
vbucketmigrator<0.7614.0>: Bucket 161 moved to the next server
vbucketmigrator<0.7614.0>: Validate bucket states
vbucketmigrator<0.7614.0>: 161 ok

INFO REPORT <11993.7643.0> 2011-01-03 13:39:00
===============================================================================
vbucketmigrator<0.7643.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7643.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7643.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7643.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7643.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7643.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7643.0>: Starting to move bucket 162
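[The migrator entries above carry start/finish stamps per vbucket move
(bucket 157: 13:37:57 -> 13:38:07; bucket 159: 13:38:20 -> 13:38:34), so each
move is taking roughly 10-14 seconds at this point in the rebalance. A
throwaway helper for that arithmetic; the module is hypothetical, shown only
for the calculation:

    -module(move_timing_sketch).
    -export([seconds_between/2]).

    %% Times are {Hour, Minute, Second} tuples lifted from the log stamps.
    seconds_between(StartTime, EndTime) ->
        calendar:time_to_seconds(EndTime) - calendar:time_to_seconds(StartTime).

    %% 1> move_timing_sketch:seconds_between({13,38,20}, {13,38,34}).
    %% 14
]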
INFO REPORT <0.93.0> 2011-01-03 13:39:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,90747,408401}},
   {active_buckets,["default"]},
   {memory,[{total,29100392},{processes,20880580},{processes_used,20861732},
            {system,8219812},{atom,560301},{atom_used,557531},
            {binary,179784},{code,4570913},{ets,1487820}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},
             {kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},
             {menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,2759},
   {memory_data,{4284698624,4235722752,{<0.10365.0>,6731832}}},
   {disk_data,[{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,53870592},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{2750397,0}},
                {context_switches,{824998,0}},
                {garbage_collection,{148955,1661418589,0}},
                {io,{{input,82099686},{output,40409970}}},
                {reductions,{666312243,1003364}},
                {run_queue,0},
                {runtime,{37643,94}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,90747,408402}},
   {active_buckets,["default"]},
   {memory,[{total,20108128},{processes,11733236},{processes_used,11724556},
            {system,8374892},{atom,559813},{atom_used,556363},
            {binary,365480},{code,4551541},{ets,1480276}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},
             {kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},
             {menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,2689},
   {memory_data,{4284698624,4252999680,{<11993.387.0>,4599840}}},
   {disk_data,[{"C:\\",46243100,46},{"D:\\",51809624,0},{"G:\\",33929248,18}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,67629056},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{2680898,0}},
                {context_switches,{477764,0}},
                {garbage_collection,{111704,1034634577,0}},
                {io,{{input,63133395},{output,31570276}}},
                {reductions,{316145554,863360}},
                {run_queue,0},
                {runtime,{23618,78}}]}]},
 {'ns_1@10.2.1.102',
  [{last_heard,{1294,90748,172400}},
   {active_buckets,["default"]},
   {memory,[{total,13395696},{processes,5768900},{processes_used,5755476},
            {system,7626796},{atom,541077},{atom_used,528744},
            {binary,323144},{code,4280811},{ets,1070060}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{kernel,"2.13.4"},
             {sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},
             {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1678},
   {memory_data,{4284698624,3163312128,{<10870.307.0>,2057352}}},
   {disk_data,[{"C:\\",49423972,41},{"D:\\",52797620,0},{"G:\\",34724465,17}]},
   {replication,[{"default",1.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,1111736320},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{1670318,0}},
                {context_switches,{168064,0}},
                {garbage_collection,{47435,233782433,0}},
                {io,{{input,18681444},{output,14783489}}},
                {reductions,{95743828,583328}},
                {run_queue,0},
                {runtime,{8408,31}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:39:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.8123.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|1.0]],
    [['ns_1@10.2.1.101'|0.9191919191919192]],
    [['ns_1@10.2.1.102'|0.9455782312925171]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <11993.7643.0> 2011-01-03 13:39:11
===============================================================================
vbucketmigrator<0.7643.0>: Bucket 162 moved to the next server
vbucketmigrator<0.7643.0>: Validate bucket states
vbucketmigrator<0.7643.0>: 162 ok

INFO REPORT <0.259.0> 2011-01-03 13:39:11
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 690
auth_errors 0
bucket_conns 1
bytes_read 5832372107
bytes_written 193742618
cas_badval 0
cas_hits 0
cas_misses 0
cmd_flush 0
cmd_get 0
cmd_set 4548601
conn_yields 238
connection_structures 235
curr_connections 23
curr_items 2825780
curr_items_tot 4688278
daemon_connections 10
decr_hits 0
decr_misses 0
delete_hits 0
delete_misses 0
ep_bg_fetched 0
ep_commit_num 5121
ep_commit_time 0
ep_commit_time_total 1520
ep_data_age 15
ep_data_age_highwat 495
ep_db_cleaner_status complete
ep_db_strategy multiMTDB
ep_dbinit 81
ep_dbname c:/Program Files/Membase/Server/data/ns_1/default
ep_dbshards 4
ep_expired 0
ep_flush_duration 3
ep_flush_duration_highwat 297
ep_flush_duration_total 2012
ep_flush_preempts 0
ep_flusher_state running
ep_flusher_todo 0
ep_io_num_read 790849
ep_io_num_write 4693971
ep_io_read_bytes 1004360426
ep_io_write_bytes 4649399717
ep_item_begin_failed 0
ep_item_commit_failed 0
ep_item_flush_expired 0
ep_item_flush_failed 0
ep_kv_size 2429495042
ep_max_data_size 3426746368
ep_max_txn_size 1000
ep_mem_high_wat 2570059776
ep_mem_low_wat 2056047820
ep_min_data_age 0
ep_num_active_non_resident 645143
ep_num_eject_failures 1689334
ep_num_eject_replicas 871116
ep_num_expiry_pager_runs 0
ep_num_non_resident 1949729
ep_num_not_my_vbuckets 841561
ep_num_pager_runs 6
ep_num_value_ejects 1951599
ep_oom_errors 0
ep_overhead 35295992
ep_pending_ops 0
ep_pending_ops_max 0
ep_pending_ops_max_duration 0
ep_pending_ops_total 0
ep_queue_age_cap 900
ep_queue_size 0
ep_storage_age 0
ep_storage_age_highwat 492
ep_storage_type featured
ep_store_max_concurrency 10
ep_store_max_readers 9
ep_store_max_readwrite 1
ep_tap_bg_fetch_requeued 0
ep_tap_bg_fetched 784593
ep_tap_keepalive 0
ep_tmp_oom_errors 0
ep_too_old 0
ep_too_young 0
ep_total_cache_size 5079870122
ep_total_del_items 0
ep_total_enqueued 4694331
ep_total_new_items 4688278
ep_total_persisted 4693971
ep_vbucket_del 512
ep_vbucket_del_avg_walltime 118096
ep_vbucket_del_fail 0
ep_vbucket_del_max_walltime 140400
ep_vbucket_del_total_walltime 60465600
ep_version 1.6.2
ep_warmed_up 0
ep_warmup true
ep_warmup_dups 0
ep_warmup_oom 0
ep_warmup_thread complete
ep_warmup_time 31200
get_hits 0
get_misses 0
incr_hits 0
incr_misses 0
libevent 2.0.7-rc
limit_maxbytes 67108864
mem_used 2464791034
pid 2160
pointer_size 64
rejected_conns 0
tap_connect_received 687
tap_mutation_received 1576969
tap_mutation_sent 3535909
tap_opaque_received 1468
tap_opaque_sent 2100
tap_vbucket_set_sent 1367
threads 4
time 1294090750
total_connections 1404
uptime 2764
version 1.4.4_304_g7d5a132
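[A few ratios worth pulling out of the stats dump above: ep_mem_high_wat and
ep_mem_low_wat sit at 75% and roughly 60% of ep_max_data_size, and mem_used
(2464791034) is at about 72%, i.e. between the two watermarks, which fits the
value-ejection counters also visible above. The arithmetic, as a small
illustrative module with the values copied in from the dump:

    -module(ep_watermarks_sketch).
    -export([fill_ratios/0]).

    %% All constants below are copied verbatim from the stats report above.
    fill_ratios() ->
        MaxSize = 3426746368,                 % ep_max_data_size
        [{high_wat, 2570059776 / MaxSize},    % ep_mem_high_wat -> 0.75
         {low_wat,  2056047820 / MaxSize},    % ep_mem_low_wat  -> ~0.60
         {mem_used, 2464791034 / MaxSize}].   % mem_used        -> ~0.72
]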
INFO REPORT <11993.7666.0> 2011-01-03 13:39:12
===============================================================================
vbucketmigrator<0.7666.0>: Connecting to {Sock 10.2.1.102:11210}
vbucketmigrator<0.7666.0>: Authenticating towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7666.0>: Authenticated towards: {Sock 10.2.1.102:11210}
vbucketmigrator<0.7666.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.7666.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7666.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.7666.0>: Starting to move bucket 163

INFO REPORT <0.110.0> 2011-01-03 13:39:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing:
{rebalancing_state,<0.8123.0>,
 {dict,3,16,16,8,80,48,
  {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
  {{[['ns_1@10.2.1.100'|1.0]],
    [['ns_1@10.2.1.101'|0.9292929292929293]],
    [['ns_1@10.2.1.102'|0.9523809523809523]],
    [],[],[],[],[],[],[],[],[],[],[],[],[]}}}}

INFO REPORT <0.110.0> 2011-01-03 13:39:18
===============================================================================
ns_log: logging ns_orchestrator:2:Rebalance exited with reason stopped

INFO REPORT <0.65.0> 2011-01-03 13:39:18
===============================================================================
config change: buckets ->
[{configs,
  [{"default",
    [{num_replicas,1},
     {ram_quota,3426746368},
     {auth_type,sasl},
     {sasl_password,[]},
     {type,membase},
     {num_vbuckets,1024},
     {ht_size,3079},
     {tap_keepalive,0},
     {tap_noop_interval,20},
     {max_txn_size,1000},
     {ht_locks,5},
     {servers,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']},
     {map,[['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'],
           ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'],
           ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'],
           ['ns_1@10.2.1.101','ns_1@10.2.1.100'], ['ns_1@10.2.1.101','ns_1@10.2.1.100'],
           ['ns_1@10.2.1.101','ns_1@10.2.1.100'],
           ['ns_1@10.2.1.101'|...], [...]|...]}]}]}]
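[For reference when reading the map above: each entry is a replica chain
[Master, Replica] for one of the 1024 vbuckets, and a client picks the chain
by hashing the key to a vbucket id. A hedged sketch of that lookup; the
CRC32-mod-N hash here is an assumption for illustration only, since the
authoritative key-to-vbucket function is defined by libvbucket, not by this
snippet:

    -module(vbmap_sketch).
    -export([route/2]).

    %% Key is a binary or string (iodata); Map is the list of
    %% [Master | Replicas] chains, 1024 entries long in this cluster.
    route(Key, Map) ->
        NumVBuckets = length(Map),
        VBucket = erlang:crc32(Key) rem NumVBuckets,   % assumed hash
        [Master | Replicas] = lists:nth(VBucket + 1, Map),
        {VBucket, Master, Replicas}.
]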
INFO REPORT <0.65.0> 2011-01-03 13:39:18
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:39:18
===============================================================================
Pushing config

ERROR REPORT <11993.7666.0> 2011-01-03 13:39:18
===============================================================================
** Generic server <11993.7666.0> terminating
** Last message in was {'EXIT',<11993.7665.0>,stopped}
** When Server state == {state,#Port<11993.5725>,vbucketmigrator,
                         {["Starting to move bucket 163",
                           "Authenticated towards: {Sock 10.2.1.101:11210}"],
                          ["Authenticating towards: {Sock 10.2.1.101:11210}"]},
                         undefined,[],0}
** Reason for termination ==
** stopped

CRASH REPORT <11993.7666.0> 2011-01-03 13:39:18
===============================================================================
Crashing process
   initial_call    {ns_port_server,init,['Argument__1']}
   pid             <11993.7666.0>
   registered_name []
   error_info      {exit,stopped,
                    [{gen_server,terminate,6},{proc_lib,init_p_do_apply,3}]}
   ancestors       [<11993.7665.0>]
   messages        [{'EXIT',#Port<11993.5725>,normal}]
   links           []
   dictionary      []
   trap_exit       true
   status          running
   heap_size       1597
   stack_size      24
   reductions      443
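[The ERROR/CRASH pair above is shutdown fallout rather than a new fault: the
port wrapper traps exits, so when the rebalance is stopped it receives
{'EXIT', Parent, stopped} and terminates with that same reason, which proc_lib
then reports as a crash. A minimal sketch of the pattern, as an illustrative
gen_server and not the actual ns_port_server module:

    -module(port_wrapper_sketch).
    -behaviour(gen_server).
    -export([init/1, handle_call/3, handle_cast/2, handle_info/2,
             terminate/2, code_change/3]).

    init(Cmd) ->
        process_flag(trap_exit, true),                 % trap_exit true, as above
        Port = open_port({spawn, Cmd}, [stream, exit_status]),
        {ok, Port}.

    %% An 'EXIT' from the parent (e.g. reason 'stopped') stops this server
    %% with the same reason -- mirroring "Reason for termination == stopped".
    handle_info({'EXIT', _From, Reason}, Port) ->
        {stop, Reason, Port};
    handle_info({Port, {data, _Bytes}}, Port) ->
        {noreply, Port}.

    terminate(_Reason, Port) ->
        catch port_close(Port),
        ok.

    handle_call(_Req, _From, State) -> {reply, ok, State}.
    handle_cast(_Msg, State) -> {noreply, State}.
    code_change(_OldVsn, State, _Extra) -> {ok, State}.
]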
INFO REPORT <0.85.0> 2011-01-03 13:39:19
===============================================================================
Pushing config done

INFO REPORT <0.65.0> 2011-01-03 13:39:19
===============================================================================
config change: rebalance_status ->
{none,<<"Rebalance failed. See logs for detailed reason. You can try rebalance again.">>}

INFO REPORT <0.65.0> 2011-01-03 13:39:19
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:39:19
===============================================================================
Pushing config

INFO REPORT <0.85.0> 2011-01-03 13:39:19
===============================================================================
Pushing config done

INFO REPORT <0.110.0> 2011-01-03 13:39:25
===============================================================================
ns_log: logging ns_orchestrator:4:Starting rebalance, KeepNodes =
['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'], EjectNodes = []

INFO REPORT <0.65.0> 2011-01-03 13:39:25
===============================================================================
config change: {node,'ns_1@10.2.1.102',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:39:25
===============================================================================
config change: {node,'ns_1@10.2.1.101',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:39:25
===============================================================================
config change: {node,'ns_1@10.2.1.100',membership} -> active

INFO REPORT <0.65.0> 2011-01-03 13:39:25
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <0.85.0> 2011-01-03 13:39:25
===============================================================================
Pushing config

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 53 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 54 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 57 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 58 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 61 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 62 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 65 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 66 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 67 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 68 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 69 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}}

INFO REPORT <0.10666.0> 2011-01-03 13:39:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 70 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}}
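[What the janitor run above, and the master-reassignment run that follows, is
doing in outline: for each vbucket it compares the configured replica chain
against the observed vbucket states; an {active, dead} master/replica pair
gets its replicators torn down so they can be rebuilt, and a vbucket whose
configured master is not active gets an active replica promoted. A hedged
sketch of that decision, illustrative only and not the actual ns_janitor code:

    -module(janitor_sketch).
    -export([check_vbucket/2]).

    %% MasterState/ReplicaState are the observed states for one vbucket.
    check_vbucket(VBucket, {MasterState, ReplicaState}) ->
        case {MasterState, ReplicaState} of
            {active, replica} ->
                ok;                              % healthy chain, nothing to do
            {active, dead} ->
                {kill_replicators, VBucket};     % "Killing replicators ..." above
            {_NotActive, active} ->
                {promote_replica, VBucket};      % "... so making that the master."
            _Other ->
                {needs_repair, VBucket}
        end.
]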
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 66 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 67 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 68 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 69 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 70 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 71 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 72 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 73 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 74 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 75 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 76 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 77 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 78 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 79 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 80 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 81 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 82 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 83 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 84 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 85 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. INFO REPORT <0.85.0> 2011-01-03 13:39:25 =============================================================================== Pushing config done ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 86 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 87 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 88 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 89 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 90 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 91 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 92 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. INFO REPORT <0.65.0> 2011-01-03 13:39:25 =============================================================================== config change: rebalance_status -> running ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 93 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 94 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 95 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 96 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 97 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 98 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 99 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. INFO REPORT <0.65.0> 2011-01-03 13:39:25 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 13:39:25 =============================================================================== Pushing config ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 100 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 101 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 102 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 103 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 104 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 105 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 106 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 107 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 108 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 109 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 110 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 111 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 112 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. INFO REPORT <0.85.0> 2011-01-03 13:39:25 =============================================================================== Pushing config done ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 113 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 114 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 115 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 116 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 117 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 118 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 119 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 120 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 121 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 122 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 123 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 124 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 125 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 126 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 127 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 128 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 129 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 130 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 131 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 132 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 133 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 134 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 135 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 136 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 137 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 138 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 139 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 140 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 141 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 142 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 143 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 144 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 145 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 146 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 147 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 148 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 149 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 150 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 151 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 152 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 153 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 154 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 155 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 156 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 157 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 158 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 159 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 160 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 161 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 162 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
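The run of ns_janitor:161 reports above reflects the janitor's repair rule for a vbucket whose configured master is not active: promote a node that does report the vbucket as active. A minimal sketch of that decision in illustrative Erlang follows; the module name, function name, and the States map representation are assumptions made for this sketch, not the shipped ns_janitor source — only the decision rule itself is taken from the log messages:

    %% Sketch only: pick a usable master for one vbucket.
    %% Chain is the configured [Master | Replicas] for the vbucket;
    %% States maps Node -> active | replica | dead for that vbucket.
    -module(janitor_sketch).
    -export([fix_master/2]).

    fix_master([Master | Replicas], States) ->
        case maps:get(Master, States, dead) of
            active ->
                {keep, Master};                 %% configured master is fine
            _NotActive ->
                %% Promote the first replica that actually reports active
                %% (the "making that the master" case logged above).
                case [N || N <- Replicas,
                           maps:get(N, States, dead) =:= active] of
                    [NewMaster | _] -> {set_master, NewMaster};
                    []              -> no_active_copy
                end
        end.

In the records above every affected vbucket has 'ns_1@10.2.1.102' as its only active copy, so that node is promoted for each of them.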
INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:209: Deleting vbucket 163 in "default" on 'ns_1@10.2.1.102' INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 512 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 513 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 514 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 515 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 516 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 517 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 518 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 519 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 520 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 521 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 522 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators 
for vbucket 523 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 524 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 525 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 526 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 527 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 528 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 529 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 530 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 531 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 532 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 533 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 534 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 535 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, 
{'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 536 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 537 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 538 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 539 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 540 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 541 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 542 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 543 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 544 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 545 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 546 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 547 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 548 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 549 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 550 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 551 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 552 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 553 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 554 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 555 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 556 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 557 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 558 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 559 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 560 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 561 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 562 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 563 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 564 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 565 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 566 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 567 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 568 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 569 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 570 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 571 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 572 on master 'ns_1@10.2.1.102' 
because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 573 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 574 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 575 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 576 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 577 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 578 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 579 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 580 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 581 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 582 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 583 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 584 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 
2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 585 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 586 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 587 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 588 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 589 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 590 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 591 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 592 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 593 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 594 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 595 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 596 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 597 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 598 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 599 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 600 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 601 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 602 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 603 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 604 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 605 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 606 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 607 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 608 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 609 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 610 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 611 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 612 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 613 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 614 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 615 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 616 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 617 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 618 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 619 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 620 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 621 on master 'ns_1@10.2.1.102' 
because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 622 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 623 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 624 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 625 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 626 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 627 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 628 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 629 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 630 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 631 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 632 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 633 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10666.0> 
2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 634 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.100', dead}} ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 635 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 636 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 637 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 638 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 639 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 640 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 641 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 642 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 643 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 644 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 645 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 646 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 647 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 648 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 649 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 650 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 651 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 652 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 653 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 654 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 655 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 656 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 657 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 658 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 659 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 660 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 661 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 662 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 663 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 664 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 665 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 666 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 667 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 668 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 669 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 670 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 671 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 672 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 673 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 674 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 675 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 676 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 677 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 678 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 679 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 680 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 681 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. ERROR REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_janitor:161: Master for vbucket 682 in "default" is not active, but 'ns_1@10.2.1.102' is, so making that the master. 
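The ns_janitor:193 records in this pass show the complementary rule: when a vbucket's master is active but its configured replica is dead, the janitor tears down the replication stream so it can be rebuilt against the current map, and the reason it logs is exactly the {MasterState, ReplicaState} pair. A minimal sketch of that check, again in illustrative Erlang with invented module and function names (the rule itself is read off the log lines):

    %% Sketch only: should the Master -> Replica stream for one
    %% vbucket be torn down? Mirrors the reason tuple printed by
    %% the ns_janitor:193 lines above.
    -module(janitor_kill_sketch).
    -export([should_kill_replicator/3]).

    should_kill_replicator(Master, Replica, States) ->
        MasterState  = maps:get(Master, States, dead),
        ReplicaState = maps:get(Replica, States, dead),
        case {MasterState, ReplicaState} of
            {active, dead} ->
                {kill, {{Master, MasterState}, {Replica, ReplicaState}}};
            _ ->
                keep
        end.

    %% e.g. should_kill_replicator('ns_1@10.2.1.102', 'ns_1@10.2.1.101',
    %%          #{'ns_1@10.2.1.102' => active, 'ns_1@10.2.1.101' => dead}).
    %% -> {kill,{{'ns_1@10.2.1.102',active},{'ns_1@10.2.1.101',dead}}}

Note that in the run above the dead replica alternates between 'ns_1@10.2.1.101' and 'ns_1@10.2.1.100', matching the per-vbucket replica assignment in the bucket map pushed in the config change that follows.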
INFO REPORT <0.65.0> 2011-01-03 13:39:25 =============================================================================== config change: buckets -> [{configs,[{"default", [{num_replicas,1}, {ram_quota,3426746368}, {auth_type,sasl}, {sasl_password,[]}, {type,membase}, {num_vbuckets,1024}, {ht_size,3079}, {tap_keepalive,0}, {tap_noop_interval,20}, {max_txn_size,1000}, {ht_locks,5}, {servers,['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102']}, {map,[['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.101'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102','ns_1@10.2.1.100'], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102',undefined], ['ns_1@10.2.1.102'|...], [...]|...]}]}]}] INFO REPORT <0.65.0> 2011-01-03 13:39:25 =============================================================================== 
ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 13:39:25 =============================================================================== Pushing config INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets [1023,1022,1021,1020,1019,1018,1017,1016,1015,1014,1013,1012,1011,1010,1009,1008,1007,1006,1005,1004,1003,1002,1001,1000,999,998,997,996,995,994,993,992,991,990,989,988,987,986,985,984,983,982,981,980,979,978,977,976,975,974,973,972,971,970,969,968,967,966,965,964,963,962,961,960,959,958,957,956,955,954,953,952,951,950,949,948,947,946,945,944,943,942,941,940,939,938,937,936,935,934,933,932,931,930,929,928,927,926,925,924,923,922,921,920,919,918,917,916,915,914,913,912,911,910,909,908,907,906,905,904,903,902,901,900,899,898,897,896,895,894,893,892,891,890,889,888,887,886,885,884,883,882,881,880,879,878,877,876,875,874,873,872,871,870,869,868,867,866,865,864,863,862,861,860,859,858,857,856,855,854,853,852,851,850,849,848,847,846,845,844,843,842,841,840,839,838,837,836,835,834,833,832,831,830,829,828,827,826,825,824,823,822,821,820,819,818,817,816,815,814,813,812,811,810,809,808,807,806,805,804,803,802,801,800,799,798,797,796,795,794,793,792,791,790,789,788,787,786,785,784,783,782,781,780,779,778,777,776,775,774,773,772,771,770,769,768,767,766,765,764,763,762,761,760,759,758,757,756,755,754,753,752,751,750,749,748,747,746,745,744,743,742,741,740,739,738,737,736,735,734,733,732,731,730,729,728,727,726,725,724,723,722,721,720,719,718,717,716,715,714,713,712,711,710,709,708,707,706,705,704,703,702,701,700,699,698,697,696,695,694,693,692,691,690,689,688,687,686,685,684,683,682,681,680,679,678,677,676,675,674,673,672,671,670,669,668,667,666,665,664,663,662,661,660,659,658,657,656,655,654,653,652,651,650,649,648,647,646,645,644,643,642,641,640,639,638,637,636,635] in bucket "default" from node 'ns_1@10.2.1.100' to node 'ns_1@10.2.1.101' INFO REPORT <0.85.0> 2011-01-03 13:39:25 =============================================================================== Pushing config done INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:251: Args = [vbucketmigrator,"./bin/vbucketmigrator/vbucketmigrator", ["-e","-a","default","-h","10.2.1.100:11210","-d","10.2.1.101:11210","-A", "-v","-b","1023","-b","1022","-b","1021","-b","1020","-b","1019","-b", "1018","-b","1017","-b","1016","-b","1015","-b","1014","-b","1013","-b", "1012","-b","1011","-b","1010","-b","1009","-b","1008","-b","1007","-b", "1006","-b","1005","-b","1004","-b","1003","-b","1002","-b","1001","-b", "1000","-b","999","-b","998","-b","997","-b","996","-b","995","-b","994", "-b","993","-b","992","-b","991","-b","990","-b","989","-b","988","-b", "987","-b","986","-b","985","-b","984","-b","983","-b","982","-b","981", "-b","980","-b","979","-b","978","-b","977","-b","976","-b","975","-b", "974","-b","973","-b","972","-b","971","-b","970","-b","969","-b","968", "-b","967","-b","966","-b","965","-b","964","-b","963","-b","962","-b", "961","-b","960","-b","959","-b","958","-b","957","-b","956","-b","955", "-b","954","-b","953","-b","952","-b","951","-b","950","-b","949","-b", "948","-b","947","-b","946","-b","945","-b","944","-b","943","-b","942", "-b","941","-b","940","-b","939","-b","938","-b","937","-b","936","-b", "935","-b","934","-b","933","-b","932","-b","931","-b","930","-b","929", 
"-b","928","-b","927","-b","926","-b","925","-b","924","-b","923","-b", "922","-b","921","-b","920","-b","919","-b","918","-b","917","-b","916", "-b","915","-b","914","-b","913","-b","912","-b","911","-b","910","-b", "909","-b","908","-b","907","-b","906","-b","905","-b","904","-b","903", "-b","902","-b","901","-b","900","-b","899","-b","898","-b","897","-b", "896","-b","895","-b","894","-b","893","-b","892","-b","891","-b","890", "-b","889","-b","888","-b","887","-b","886","-b","885","-b","884","-b", "883","-b","882","-b","881","-b","880","-b","879","-b","878","-b","877", "-b","876","-b","875","-b","874","-b","873","-b","872","-b","871","-b", "870","-b","869","-b","868","-b","867","-b","866","-b","865","-b","864", "-b","863","-b","862","-b","861","-b","860","-b","859","-b","858","-b", "857","-b","856","-b","855","-b","854","-b","853","-b","852","-b","851", "-b","850","-b","849","-b","848","-b","847","-b","846","-b","845","-b", "844","-b","843","-b","842","-b","841","-b","840","-b","839","-b","838", "-b","837","-b","836","-b","835","-b","834","-b","833","-b","832","-b", "831","-b","830","-b","829","-b","828","-b","827","-b","826","-b","825", "-b","824","-b","823","-b","822","-b","821","-b","820","-b","819","-b", "818","-b","817","-b","816","-b","815","-b","814","-b","813","-b","812", "-b","811","-b","810","-b","809","-b","808","-b","807","-b","806","-b", "805","-b","804","-b","803","-b","802","-b","801","-b","800","-b","799", "-b","798","-b","797","-b","796","-b","795","-b","794","-b","793","-b", "792","-b","791","-b","790","-b","789","-b","788","-b","787","-b","786", "-b","785","-b","784","-b","783","-b","782","-b","781","-b","780","-b", "779","-b","778","-b","777","-b","776","-b","775","-b","774","-b","773", "-b","772","-b","771","-b","770","-b","769","-b","768","-b","767","-b", "766","-b","765","-b","764","-b","763","-b","762","-b","761","-b","760", "-b","759","-b","758","-b","757","-b","756","-b","755","-b","754","-b", "753","-b","752","-b","751","-b","750","-b","749","-b","748","-b","747", "-b","746","-b","745","-b","744","-b","743","-b","742","-b","741","-b", "740","-b","739","-b","738","-b","737","-b","736","-b","735","-b","734", "-b","733","-b","732","-b","731","-b","730","-b","729","-b","728","-b", "727","-b","726","-b","725","-b","724","-b","723","-b","722","-b","721", "-b","720","-b","719","-b","718","-b","717","-b","716","-b","715","-b", "714","-b","713","-b","712","-b","711","-b","710","-b","709","-b","708", "-b","707","-b","706","-b","705","-b","704","-b","703","-b","702","-b", "701","-b","700","-b","699","-b","698","-b","697","-b","696","-b","695", "-b","694","-b","693","-b","692","-b","691","-b","690","-b","689","-b", "688","-b","687","-b","686","-b","685","-b","684","-b","683","-b","682", "-b","681","-b","680","-b","679","-b","678","-b","677","-b","676","-b", "675","-b","674","-b","673","-b","672","-b","671","-b","670","-b","669", "-b","668","-b","667","-b","666","-b","665","-b","664","-b","663","-b", "662","-b","661","-b","660","-b","659","-b","658","-b","657","-b","656", "-b","655","-b","654","-b","653","-b","652","-b","651","-b","650","-b", "649","-b","648","-b","647","-b","646","-b","645","-b","644","-b","643", "-b","642","-b","641","-b","640","-b","639","-b","638","-b","637","-b", "636","-b","635"], [use_stdio,stderr_to_stdout,{write_data,[[],"\n"]}]] PROGRESS REPORT <0.260.0> 2011-01-03 13:39:25 =============================================================================== supervisor {local,'ns_vbm_sup-default'} started [{pid,<0.10700.0>}, 
{name,{child_id,[1023,1022,1021,1020,1019,1018,1017,1016,1015,1014, 1013,1012,1011,1010,1009,1008,1007,1006,1005,1004, 1003,1002,1001,1000,999,998,997,996,995,994,993, 992,991,990,989,988,987,986,985,984,983,982,981, 980,979,978,977,976,975,974,973,972,971,970,969, 968,967,966,965,964,963,962,961,960,959,958,957, 956,955,954,953,952,951,950,949,948,947,946,945, 944,943,942,941,940,939,938,937,936,935,934,933, 932,931,930,929,928,927,926,925,924,923,922,921, 920,919,918,917,916,915,914,913,912,911,910,909, 908,907,906,905,904,903,902,901,900,899,898,897, 896,895,894,893,892,891,890,889,888,887,886,885, 884,883,882,881,880,879,878,877,876,875,874,873, 872,871,870,869,868,867,866,865,864,863,862,861, 860,859,858,857,856,855,854,853,852,851,850,849, 848,847,846,845,844,843,842,841,840,839,838,837, 836,835,834,833,832,831,830,829,828,827,826,825, 824,823,822,821,820,819,818,817,816,815,814,813, 812,811,810,809,808,807,806,805,804,803,802,801, 800,799,798,797,796,795,794,793,792,791,790,789, 788,787,786,785,784,783,782,781,780,779,778,777, 776,775,774,773,772,771,770,769,768,767,766,765, 764,763,762,761,760,759,758,757,756,755,754,753, 752,751,750,749,748,747,746,745,744,743,742,741, 740,739,738,737,736,735,734,733,732,731,730,729, 728,727,726,725,724,723,722,721,720,719,718,717, 716,715,714,713,712,711,710,709,708,707,706,705, 704,703,702,701,700,699,698,697,696,695,694,693, 692,691,690,689,688,687,686,685,684,683,682,681, 680,679,678,677,676,675,674,673,672,671,670,669, 668,667,666,665,664,663,662,661,660,659,658,657, 656,655,654,653,652,651,650,649,648,647,646,645, 644,643,642,641,640,639,638,637,636,635], 'ns_1@10.2.1.101'}}, {mfa,{ns_port_server,start_link, [vbucketmigrator, "./bin/vbucketmigrator/vbucketmigrator", ["-e","-a","default","-h","10.2.1.100:11210", "-d","10.2.1.101:11210","-A","-v","-b", "1023","-b","1022","-b","1021","-b","1020", "-b","1019","-b","1018","-b","1017","-b", "1016","-b","1015","-b","1014","-b","1013", "-b","1012","-b","1011","-b","1010","-b", "1009","-b","1008","-b","1007","-b","1006", "-b","1005","-b","1004","-b","1003","-b", "1002","-b","1001","-b","1000","-b","999", "-b","998","-b","997","-b","996","-b","995", "-b","994","-b","993","-b","992","-b","991", "-b","990","-b","989","-b","988","-b","987", "-b","986","-b","985","-b","984","-b","983", "-b","982","-b","981","-b","980","-b","979", "-b","978","-b","977","-b","976","-b","975", "-b","974","-b","973","-b","972","-b","971", "-b","970","-b","969","-b","968","-b","967", "-b","966","-b","965","-b","964","-b","963", "-b","962","-b","961","-b","960","-b","959", "-b","958","-b","957","-b","956","-b","955", "-b","954","-b","953","-b","952","-b","951", "-b","950","-b","949","-b","948","-b","947", "-b","946","-b","945","-b","944","-b","943", "-b","942","-b","941","-b","940","-b","939", "-b","938","-b","937","-b","936","-b","935", "-b","934","-b","933","-b","932","-b","931", "-b","930","-b","929","-b","928","-b","927", "-b","926","-b","925","-b","924","-b","923", "-b","922","-b","921","-b","920","-b","919", "-b","918","-b","917","-b","916","-b","915", "-b","914","-b","913","-b","912","-b","911", "-b","910","-b","909","-b","908","-b","907", "-b","906","-b","905","-b","904","-b","903", "-b","902","-b","901","-b","900","-b","899", "-b","898","-b","897","-b","896","-b","895", "-b","894","-b","893","-b","892","-b","891", "-b","890","-b","889","-b","888","-b","887", "-b","886","-b","885","-b","884","-b","883", "-b","882","-b","881","-b","880","-b","879", "-b","878","-b","877","-b","876","-b","875", 
"-b","874","-b","873","-b","872","-b","871", "-b","870","-b","869","-b","868","-b","867", "-b","866","-b","865","-b","864","-b","863", "-b","862","-b","861","-b","860","-b","859", "-b","858","-b","857","-b","856","-b","855", "-b","854","-b","853","-b","852","-b","851", "-b","850","-b","849","-b","848","-b","847", "-b","846","-b","845","-b","844","-b","843", "-b","842","-b","841","-b","840","-b","839", "-b","838","-b","837","-b","836","-b","835", "-b","834","-b","833","-b","832","-b","831", "-b","830","-b","829","-b","828","-b","827", "-b","826","-b","825","-b","824","-b","823", "-b","822","-b","821","-b","820","-b","819", "-b","818","-b","817","-b","816","-b","815", "-b","814","-b","813","-b","812","-b","811", "-b","810","-b","809","-b","808","-b","807", "-b","806","-b","805","-b","804","-b","803", "-b","802","-b","801","-b","800","-b","799", "-b","798","-b","797","-b","796","-b","795", "-b","794","-b","793","-b","792","-b","791", "-b","790","-b","789","-b","788","-b","787", "-b","786","-b","785","-b","784","-b","783", "-b","782","-b","781","-b","780","-b","779", "-b","778","-b","777","-b","776","-b","775", "-b","774","-b","773","-b","772","-b","771", "-b","770","-b","769","-b","768","-b","767", "-b","766","-b","765","-b","764","-b","763", "-b","762","-b","761","-b","760","-b","759", "-b","758","-b","757","-b","756","-b","755", "-b","754","-b","753","-b","752","-b","751", "-b","750","-b","749","-b","748","-b","747", "-b","746","-b","745","-b","744","-b","743", "-b","742","-b","741","-b","740","-b","739", "-b","738","-b","737","-b","736","-b","735", "-b","734","-b","733","-b","732","-b","731", "-b","730","-b","729","-b","728","-b","727", "-b","726","-b","725","-b","724","-b","723", "-b","722","-b","721","-b","720","-b","719", "-b","718","-b","717","-b","716","-b","715", "-b","714","-b","713","-b","712","-b","711", "-b","710","-b","709","-b","708","-b","707", "-b","706","-b","705","-b","704","-b","703", "-b","702","-b","701","-b","700","-b","699", "-b","698","-b","697","-b","696","-b","695", "-b","694","-b","693","-b","692","-b","691", "-b","690","-b","689","-b","688","-b","687", "-b","686","-b","685","-b","684","-b","683", "-b","682","-b","681","-b","680","-b","679", "-b","678","-b","677","-b","676","-b","675", "-b","674","-b","673","-b","672","-b","671", "-b","670","-b","669","-b","668","-b","667", "-b","666","-b","665","-b","664","-b","663", "-b","662","-b","661","-b","660","-b","659", "-b","658","-b","657","-b","656","-b","655", "-b","654","-b","653","-b","652","-b","651", "-b","650","-b","649","-b","648","-b","647", "-b","646","-b","645","-b","644","-b","643", "-b","642","-b","641","-b","640","-b","639", "-b","638","-b","637","-b","636","-b","635"], [use_stdio,stderr_to_stdout, {write_data,[[],"\n"]}]]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets 
[511,510,509,508,507,506,505,504,503,502,501,500,499,498,497,496,495,494,493,492,491,490,489,488,487,486,485,484,483,482,481,480,479,478,477,476,475,474,473,472,471,470,469,468,467,466,465,464,463,462,461,460,459,458,457,456,455,454,453,452,451,450,449,448,447,446,445,444,443,442,441,440,439,438,437,436,435,434,433,432,431,430,429,428,427,426,425,424,423,422,421,420,419,418,417,416,415,414,413,412,411,410,409,408,407,406,405,404,403,402,401,400,399,398,397,396,395,394,393,392,391,390,389,388,387,386,385,384,383,382,381,380,379,378,377,376,375,374,373,372,371,370,369,368,367,366,365,364,363,362,361,360,359,358,357,356,355,354,353,352,351,350,349,348,347,346,345,344,343,342,341,340,339,338,337,336,335,334,333,332,331,330,329,328,327,326,325,324,323,322,321,320,319,318,317,316,315,314,313,312,311,310,309,308,307,306,305,304,303,302,301,300,299,298,297,296,295,294,293,292,291,290,289,288,287,286,285,284,283,282,281,280,279,278,277,276,275,274,273,272,271,270,269,268,267,266,265,264,263,262,261,260,259,258,257,256,255,254,253,252,251,250,249,248,247,246,245,244,243,242,241,240,239,238,237,236,235,234,233,232,231,230,229,228,227,226,225,224,223,222,221,220,219,218,217,216,215,214,213,212,211,210,209,208,207,206,205,204,203,202,201,200,199,198,197,196,195,194,193,192,191,190,189,188,187,186,185,184,183,182,181,180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165,164,163,162,161,160,159,158,157,156,155,154,153,152,151,150,149,148,147,146,145,144,143,142,141,140,139,138,137,136,135,134,133,132,131,130,129,128,127,126,125,124,123,122,121,120,119,118,117,116,115,114,113,112,111,110,109,108,107,106,105,104,103,102,101,100,99,98,97,96,95,94,93,92,91,90,89,88,87,86,85,84,83,82,81,80,79,78,77,76,75,74,73,72,71] in bucket "default" from node 'ns_1@10.2.1.101' to node 'ns_1@10.2.1.100' INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:251: Args = [vbucketmigrator,"./bin/vbucketmigrator/vbucketmigrator", ["-e","-a","default","-h","10.2.1.101:11210","-d","10.2.1.100:11210","-A", "-v","-b","511","-b","510","-b","509","-b","508","-b","507","-b","506","-b", "505","-b","504","-b","503","-b","502","-b","501","-b","500","-b","499", "-b","498","-b","497","-b","496","-b","495","-b","494","-b","493","-b", "492","-b","491","-b","490","-b","489","-b","488","-b","487","-b","486", "-b","485","-b","484","-b","483","-b","482","-b","481","-b","480","-b", "479","-b","478","-b","477","-b","476","-b","475","-b","474","-b","473", "-b","472","-b","471","-b","470","-b","469","-b","468","-b","467","-b", "466","-b","465","-b","464","-b","463","-b","462","-b","461","-b","460", "-b","459","-b","458","-b","457","-b","456","-b","455","-b","454","-b", "453","-b","452","-b","451","-b","450","-b","449","-b","448","-b","447", "-b","446","-b","445","-b","444","-b","443","-b","442","-b","441","-b", "440","-b","439","-b","438","-b","437","-b","436","-b","435","-b","434", "-b","433","-b","432","-b","431","-b","430","-b","429","-b","428","-b", "427","-b","426","-b","425","-b","424","-b","423","-b","422","-b","421", "-b","420","-b","419","-b","418","-b","417","-b","416","-b","415","-b", "414","-b","413","-b","412","-b","411","-b","410","-b","409","-b","408", "-b","407","-b","406","-b","405","-b","404","-b","403","-b","402","-b", "401","-b","400","-b","399","-b","398","-b","397","-b","396","-b","395", "-b","394","-b","393","-b","392","-b","391","-b","390","-b","389","-b", 
"388","-b","387","-b","386","-b","385","-b","384","-b","383","-b","382", "-b","381","-b","380","-b","379","-b","378","-b","377","-b","376","-b", "375","-b","374","-b","373","-b","372","-b","371","-b","370","-b","369", "-b","368","-b","367","-b","366","-b","365","-b","364","-b","363","-b", "362","-b","361","-b","360","-b","359","-b","358","-b","357","-b","356", "-b","355","-b","354","-b","353","-b","352","-b","351","-b","350","-b", "349","-b","348","-b","347","-b","346","-b","345","-b","344","-b","343", "-b","342","-b","341","-b","340","-b","339","-b","338","-b","337","-b", "336","-b","335","-b","334","-b","333","-b","332","-b","331","-b","330", "-b","329","-b","328","-b","327","-b","326","-b","325","-b","324","-b", "323","-b","322","-b","321","-b","320","-b","319","-b","318","-b","317", "-b","316","-b","315","-b","314","-b","313","-b","312","-b","311","-b", "310","-b","309","-b","308","-b","307","-b","306","-b","305","-b","304", "-b","303","-b","302","-b","301","-b","300","-b","299","-b","298","-b", "297","-b","296","-b","295","-b","294","-b","293","-b","292","-b","291", "-b","290","-b","289","-b","288","-b","287","-b","286","-b","285","-b", "284","-b","283","-b","282","-b","281","-b","280","-b","279","-b","278", "-b","277","-b","276","-b","275","-b","274","-b","273","-b","272","-b", "271","-b","270","-b","269","-b","268","-b","267","-b","266","-b","265", "-b","264","-b","263","-b","262","-b","261","-b","260","-b","259","-b", "258","-b","257","-b","256","-b","255","-b","254","-b","253","-b","252", "-b","251","-b","250","-b","249","-b","248","-b","247","-b","246","-b", "245","-b","244","-b","243","-b","242","-b","241","-b","240","-b","239", "-b","238","-b","237","-b","236","-b","235","-b","234","-b","233","-b", "232","-b","231","-b","230","-b","229","-b","228","-b","227","-b","226", "-b","225","-b","224","-b","223","-b","222","-b","221","-b","220","-b", "219","-b","218","-b","217","-b","216","-b","215","-b","214","-b","213", "-b","212","-b","211","-b","210","-b","209","-b","208","-b","207","-b", "206","-b","205","-b","204","-b","203","-b","202","-b","201","-b","200", "-b","199","-b","198","-b","197","-b","196","-b","195","-b","194","-b", "193","-b","192","-b","191","-b","190","-b","189","-b","188","-b","187", "-b","186","-b","185","-b","184","-b","183","-b","182","-b","181","-b", "180","-b","179","-b","178","-b","177","-b","176","-b","175","-b","174", "-b","173","-b","172","-b","171","-b","170","-b","169","-b","168","-b", "167","-b","166","-b","165","-b","164","-b","163","-b","162","-b","161", "-b","160","-b","159","-b","158","-b","157","-b","156","-b","155","-b", "154","-b","153","-b","152","-b","151","-b","150","-b","149","-b","148", "-b","147","-b","146","-b","145","-b","144","-b","143","-b","142","-b", "141","-b","140","-b","139","-b","138","-b","137","-b","136","-b","135", "-b","134","-b","133","-b","132","-b","131","-b","130","-b","129","-b", "128","-b","127","-b","126","-b","125","-b","124","-b","123","-b","122", "-b","121","-b","120","-b","119","-b","118","-b","117","-b","116","-b", "115","-b","114","-b","113","-b","112","-b","111","-b","110","-b","109", "-b","108","-b","107","-b","106","-b","105","-b","104","-b","103","-b", "102","-b","101","-b","100","-b","99","-b","98","-b","97","-b","96","-b", "95","-b","94","-b","93","-b","92","-b","91","-b","90","-b","89","-b","88", "-b","87","-b","86","-b","85","-b","84","-b","83","-b","82","-b","81","-b", "80","-b","79","-b","78","-b","77","-b","76","-b","75","-b","74","-b","73", "-b","72","-b","71"], 
[use_stdio,stderr_to_stdout,{write_data,[[],"\n"]}]] INFO REPORT <0.10666.0> 2011-01-03 13:39:25 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets [634,631,630,627,626,623,622,619,618,615,614,611,610,607,606,603,602,599,598,595,594,591,590,587,586,583,582,579,578,575,574,571,570,567,566,563,562,559,558,555,554,551,550,547,546,543,542,539,538,535,534,531,530,527,526,523,522,519,518,515,514,70,69,66,65,62,61,58,57,54,53] in bucket "default" from node 'ns_1@10.2.1.102' to node 'ns_1@10.2.1.100' INFO REPORT <0.10700.0> 2011-01-03 13:39:27 =============================================================================== vbucketmigrator<0.10700.0>: Connecting to {Sock 10.2.1.101:11210} vbucketmigrator<0.10700.0>: Authenticating towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.10700.0>: Authenticated towards: {Sock 10.2.1.101:11210} vbucketmigrator<0.10700.0>: Connecting to {Sock 10.2.1.100:11210} vbucketmigrator<0.10700.0>: Authenticating towards: {Sock 10.2.1.100:11210} vbucketmigrator<0.10700.0>: Authenticated towards: {Sock 10.2.1.100:11210} INFO REPORT <0.105.0> 2011-01-03 13:39:27 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.110.0> 2011-01-03 13:39:28 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.10666.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.0]], [['ns_1@10.2.1.101'| 0.0]], [['ns_1@10.2.1.102'| 0.0]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.105.0> 2011-01-03 13:39:29 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:39:30 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:39:32 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.10666.0> 2011-01-03 13:39:33 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:251: Args = [vbucketmigrator,"./bin/vbucketmigrator/vbucketmigrator", ["-e","-a","default","-h","10.2.1.102:11210","-d","10.2.1.100:11210","-A", "-v","-b","634","-b","631","-b","630","-b","627","-b","626","-b","623","-b", "622","-b","619","-b","618","-b","615","-b","614","-b","611","-b","610", "-b","607","-b","606","-b","603","-b","602","-b","599","-b","598","-b", "595","-b","594","-b","591","-b","590","-b","587","-b","586","-b","583", "-b","582","-b","579","-b","578","-b","575","-b","574","-b","571","-b", "570","-b","567","-b","566","-b","563","-b","562","-b","559","-b","558", "-b","555","-b","554","-b","551","-b","550","-b","547","-b","546","-b", "543","-b","542","-b","539","-b","538","-b","535","-b","534","-b","531", "-b","530","-b","527","-b","526","-b","523","-b","522","-b","519","-b", "518","-b","515","-b","514","-b","70","-b","69","-b","66","-b","65","-b", "62","-b","61","-b","58","-b","57","-b","54","-b","53"], [use_stdio,stderr_to_stdout,{write_data,[[],"\n"]}]] INFO REPORT <0.10666.0> 2011-01-03 13:39:33 
=============================================================================== ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets [633,632,629,628,625,624,621,620,617,616,613,612,609,608,605,604,601,600,597,596,593,592,589,588,585,584,581,580,577,576,573,572,569,568,565,564,561,560,557,556,553,552,549,548,545,544,541,540,537,536,533,532,529,528,525,524,521,520,517,516,513,512,68,67,64,63,60,59,56,55,52,51,50,49,48,47,46,45,44,43,42,41,40,39,38,37,36,35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0] in bucket "default" from node 'ns_1@10.2.1.102' to node 'ns_1@10.2.1.101' INFO REPORT <0.85.0> 2011-01-03 13:39:33 =============================================================================== Pulling config from: 'ns_1@10.2.1.102' INFO REPORT <0.105.0> 2011-01-03 13:39:34 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:39:35 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:39:37 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.110.0> 2011-01-03 13:39:38 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.10666.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.0]], [['ns_1@10.2.1.101'| 0.0]], [['ns_1@10.2.1.102'| 0.0]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.110.0> 2011-01-03 13:39:38 =============================================================================== ns_log: logging ns_orchestrator:2:Rebalance exited with reason {{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,633}, 30000]}} INFO REPORT <0.65.0> 2011-01-03 13:39:38 =============================================================================== config change: rebalance_status -> {none,<<"Rebalance failed. See logs for detailed reason. 
You can try rebalance again.">>} INFO REPORT <0.65.0> 2011-01-03 13:39:38 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 13:39:38 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 13:39:39 =============================================================================== Pushing config done INFO REPORT <0.105.0> 2011-01-03 13:39:39 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:39:41 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:39:42 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:39:44 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:39:45 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:39:46 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:39:47 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: 
Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, 
{'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10767.0> 2011-01-03 13:39:48 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 53 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.105.0> 2011-01-03 13:39:49 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:39:50 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.65.0> 2011-01-03 13:39:50 =============================================================================== config change: 
{node,'ns_1@10.2.1.102',membership} -> active INFO REPORT <0.110.0> 2011-01-03 13:39:50 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default" with reason shutdown INFO REPORT <0.110.0> 2011-01-03 13:39:50 =============================================================================== ns_log: logging ns_orchestrator:4:Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101', 'ns_1@10.2.1.102'], EjectNodes = [] INFO REPORT <0.72.0> 2011-01-03 13:39:50 =============================================================================== ns_log: suppressing duplicate log ns_orchestrator:4("Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101',\n 'ns_1@10.2.1.102'], EjectNodes = []\n") because it's been seen 1 times in the past 25.257 secs (last seen 25.257 secs ago INFO REPORT <0.65.0> 2011-01-03 13:39:50 =============================================================================== config change: {node,'ns_1@10.2.1.101',membership} -> active INFO REPORT <0.110.0> 2011-01-03 13:39:50 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.10772.0>, {dict, 0, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.65.0> 2011-01-03 13:39:50 =============================================================================== config change: {node,'ns_1@10.2.1.100',membership} -> active INFO REPORT <0.65.0> 2011-01-03 13:39:50 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 13:39:50 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 13:39:50 =============================================================================== Pushing config done INFO REPORT <0.65.0> 2011-01-03 13:39:50 =============================================================================== config change: rebalance_status -> running INFO REPORT <0.65.0> 2011-01-03 13:39:50 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 13:39:50 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 13:39:50 =============================================================================== Pushing config done INFO REPORT <0.105.0> 2011-01-03 13:39:51 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: 
Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, 
{'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:52 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 54 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.105.0> 2011-01-03 13:39:53 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:39:54 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:39:56 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.10772.0> 2011-01-03 13:39:56 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:56 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:39:56 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 57 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.105.0> 2011-01-03 13:39:57 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.10772.0> 2011-01-03 13:39:58 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 58 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.105.0> 2011-01-03 13:39:58 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.110.0> 2011-01-03 13:39:58 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.10772.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.0]], [['ns_1@10.2.1.101'| 0.0]], [['ns_1@10.2.1.102'| 0.0]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.105.0> 2011-01-03 13:39:59 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.10772.0> 2011-01-03 13:40:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:40:00 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:40:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 61 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.105.0> 2011-01-03 13:40:01 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.10772.0> 2011-01-03 13:40:03 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 62 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.105.0> 2011-01-03 13:40:03 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:40:05 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:40:06 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:40:07 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.10772.0> 2011-01-03 13:40:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:40:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10772.0> 2011-01-03 13:40:08 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 65 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.93.0> 2011-01-03 13:40:08 =============================================================================== ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,90807,406401}}, {active_buckets,["default"]}, {memory, [{total,22481816}, {processes,14243716}, {processes_used,14224468}, {system,8238100}, {atom,560301}, {atom_used,557531}, {binary,200064}, {code,4570913}, {ets,1485844}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2819}, {memory_data,{4284698624,4237369344,{<0.10525.0>,6731832}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.5}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,79544320}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2810395,0}}, {context_switches,{849788,0}}, {garbage_collection,{152691,1709436811,0}}, {io,{{input,84995724},{output,42163947}}}, {reductions,{686058074,1666722}}, {run_queue,0}, 
{runtime,{39187,140}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,90807,406402}}, {active_buckets,["default"]}, {memory, [{total,18848392}, {processes,10542828}, {processes_used,10533708}, {system,8305564}, {atom,559813}, {atom_used,556363}, {binary,310856}, {code,4551541}, {ets,1469876}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2749}, {memory_data,{4284698624,4230397952,{<11993.387.0>,4599840}}}, {disk_data, [{"C:\\",46243100,46},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.5}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,74891264}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2743203,2307}}, {context_switches,{491419,0}}, {garbage_collection,{114096,1065255011,0}}, {io,{{input,64753138},{output,32594563}}}, {reductions,{326091930,1207272}}, {run_queue,0}, {runtime,{24492,110}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,90808,170400}}, {active_buckets,["default"]}, {memory, [{total,15356712}, {processes,7650644}, {processes_used,7637676}, {system,7706068}, {atom,541077}, {atom_used,528868}, {binary,403672}, {code,4280811}, {ets,1068164}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,1738}, {memory_data,{4284698624,3170402304,{<10870.218.0>,1086308}}}, {disk_data, [{"C:\\",49423972,41},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,1095217152}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{1730316,0}}, {context_switches,{175005,0}}, {garbage_collection,{49523,245811225,0}}, {io,{{input,19074899},{output,15649065}}}, {reductions,{99978017,691846}}, {run_queue,0}, {runtime,{9048,141}}]}]}] INFO REPORT <0.110.0> 2011-01-03 13:40:08 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.10772.0>, {dict, 3, 16, 16, 8, 80, 48, {[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []}, {{[['ns_1@10.2.1.100'| 0.0]], [['ns_1@10.2.1.101'| 0.0]], [['ns_1@10.2.1.102'| 0.0]], [], [], [], [], [], [], [], [], [], [], [], [], []}}}} INFO REPORT <0.105.0> 2011-01-03 13:40:08 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:40:10 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:40:12 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.110.0> 2011-01-03 13:40:13 =============================================================================== ns_log: logging ns_orchestrator:2:Rebalance exited with reason {{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,65}, 30000]}} INFO 
REPORT <0.65.0> 2011-01-03 13:40:13 =============================================================================== config change: rebalance_status -> {none,<<"Rebalance failed. See logs for detailed reason. You can try rebalance again.">>} INFO REPORT <0.65.0> 2011-01-03 13:40:13 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.85.0> 2011-01-03 13:40:13 =============================================================================== Pushing config INFO REPORT <0.85.0> 2011-01-03 13:40:13 =============================================================================== Pushing config done INFO REPORT <0.105.0> 2011-01-03 13:40:13 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:40:14 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.105.0> 2011-01-03 13:40:16 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: 
Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, 
{'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10870.0> 2011-01-03 13:40:18 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 66 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.85.0> 2011-01-03 13:40:21 =============================================================================== Pulling config from: 'ns_1@10.2.1.101' CRASH REPORT <0.10870.0> 2011-01-03 13:40:23 =============================================================================== Crashing process initial_call {ns_janitor,cleanup,['Argument__1']} pid <0.10870.0> registered_name [] error_info {exit,{{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,66}, 30000]}}, [{gen_server,call,3}, {lists,foreach,2}, {ns_janitor,do_sanify_chain,5}, {ns_janitor,sanify_chain,5}, {ns_janitor,'-sanify/3-lc$^0/1-0-',4}, {ns_janitor,'-sanify/3-lc$^0/1-0-',4}, {ns_janitor,cleanup,1}, {proc_lib,init_p_do_apply,3}]} ancestors [<0.110.0>,ns_server_sup,ns_server_cluster_sup,<0.60.0>] messages [] links [<0.110.0>] dictionary [] trap_exit false status running heap_size 75025 stack_size 24 reductions 173247 INFO REPORT <0.110.0> 2011-01-03 13:40:23 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default" with reason {{{badmatch, {error, timeout}}, [{mc_client_binary, cmd_binary_vocal_recv, 5}, {mc_client_binary, delete_vbucket, 2}, {ns_memcached, handle_call, 3}, {gen_server, handle_msg, 5}, {proc_lib, init_p_do_apply, 3}]}, {gen_server, call, [{'ns_memcached-default', 'ns_1@10.2.1.101'}, {delete_vbucket, 66}, 30000]}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101'] INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for 
vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO 
REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of 
{{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 67 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 68 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10886.0> 2011-01-03 13:40:24 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 69 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.110.0> 2011-01-03 13:40:28 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.10886.0>} CRASH REPORT <0.10886.0> 2011-01-03 13:40:29 =============================================================================== Crashing process initial_call {ns_janitor,cleanup,['Argument__1']} pid <0.10886.0> registered_name [] error_info {exit,{{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,69}, 30000]}}, [{gen_server,call,3}, {lists,foreach,2}, {ns_janitor,do_sanify_chain,5}, {ns_janitor,sanify_chain,5}, {ns_janitor,'-sanify/3-lc$^0/1-0-',4}, {ns_janitor,'-sanify/3-lc$^0/1-0-',4}, {ns_janitor,cleanup,1}, 
{proc_lib,init_p_do_apply,3}]} ancestors [<0.110.0>,ns_server_sup,ns_server_cluster_sup,<0.60.0>] messages [] links [<0.110.0>] dictionary [] trap_exit false status running heap_size 75025 stack_size 24 reductions 180536
INFO REPORT <0.110.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default" with reason {{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,69}, 30000]}}
INFO REPORT <0.110.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default" with reason shutdown
INFO REPORT <0.110.0> 2011-01-03 13:40:29
===============================================================================
ns_log: logging ns_orchestrator:4:Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101','ns_1@10.2.1.102'], EjectNodes = []
INFO REPORT <0.72.0> 2011-01-03 13:40:29
===============================================================================
ns_log: suppressing duplicate log ns_orchestrator:4("Starting rebalance, KeepNodes = ['ns_1@10.2.1.100','ns_1@10.2.1.101',\n 'ns_1@10.2.1.102'], EjectNodes = []\n") because it's been seen 2 times in the past 64.772 secs (last seen 39.515 secs ago)
INFO REPORT <0.110.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state rebalancing: {rebalancing_state, <0.10903.0>, {dict, 0, 16, 16, 8, 80, 48, {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}, {{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}}}
INFO REPORT <0.65.0> 2011-01-03 13:40:29
===============================================================================
config change: {node,'ns_1@10.2.1.102',membership} -> active
INFO REPORT <0.65.0> 2011-01-03 13:40:29
===============================================================================
config change: {node,'ns_1@10.2.1.101',membership} -> active
INFO REPORT <0.65.0> 2011-01-03 13:40:29
===============================================================================
config change: {node,'ns_1@10.2.1.100',membership} -> active
INFO REPORT <0.65.0> 2011-01-03 13:40:29
===============================================================================
ns_node_disco_conf_events config all
INFO REPORT <0.85.0> 2011-01-03 13:40:29
===============================================================================
Pushing config
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.85.0> 2011-01-03 13:40:29
===============================================================================
Pushing config done
INFO REPORT <0.65.0> 2011-01-03 13:40:29
===============================================================================
config change: rebalance_status -> running
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.65.0> 2011-01-03 13:40:29
===============================================================================
ns_node_disco_conf_events config all
INFO REPORT <0.85.0> 2011-01-03 13:40:29
===============================================================================
Pushing config
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 67 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 68 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10903.0> 2011-01-03 13:40:30
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 70 in "default" on 'ns_1@10.2.1.101'
INFO REPORT <0.85.0> 2011-01-03 13:40:30
===============================================================================
Pushing config done
INFO REPORT <0.110.0> 2011-01-03 13:40:35
===============================================================================
ns_log: logging ns_orchestrator:2:Rebalance exited with reason {{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,70}, 30000]}}
INFO REPORT <0.65.0> 2011-01-03 13:40:35
===============================================================================
config change: rebalance_status -> {none,<<"Rebalance failed. See logs for detailed reason. You can try rebalance again.">>}
INFO REPORT <0.65.0> 2011-01-03 13:40:35
===============================================================================
ns_node_disco_conf_events config all
INFO REPORT <0.85.0> 2011-01-03 13:40:35
===============================================================================
Pushing config
INFO REPORT <0.85.0> 2011-01-03 13:40:35
===============================================================================
Pushing config done
INFO REPORT <0.10955.0> 2011-01-03 13:40:38
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10955.0> 2011-01-03 13:40:38
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10955.0> 2011-01-03 13:40:38
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10955.0> 2011-01-03 13:40:38
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10955.0> 2011-01-03 13:40:38
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10955.0> 2011-01-03 13:40:38
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10955.0> 2011-01-03 13:40:38
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.10955.0> 2011-01-03 13:40:38
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
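The janitor-run reason at 13:40:29 and the rebalance exit at 13:40:35 above are the same failure one vbucket apart (69, then 70). Read inside-out: mc_client_binary:cmd_binary_vocal_recv/5 returned {error,timeout} while ns_memcached:handle_call/3 was executing delete_vbucket, a bare match on ok turned that into a badmatch exit, and the orchestrator observed it through gen_server:call({'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,VB}, 30000), meaning the deletion on 'ns_1@10.2.1.101' did not complete within the 30-second call timeout. A minimal Erlang sketch of that failure shape, with assumed bodies (only the module and function names come from the stack trace above; gen_server:call/3 is standard OTP):

%% Minimal sketch, assuming simplified bodies; not ns_server's actual code.
-module(badmatch_timeout_sketch).
-export([demo/0]).

cmd_binary_vocal_recv() ->
    %% Stand-in for mc_client_binary:cmd_binary_vocal_recv/5: the read from
    %% memcached times out and is reported as a value, not an exception.
    {error, timeout}.

delete_vbucket() ->
    %% A bare match on ok converts {error, timeout} into a badmatch error,
    %% the innermost frame of the reason term logged above.
    ok = cmd_binary_vocal_recv().

demo() ->
    %% ns_memcached runs this inside handle_call/3, so the orchestrator sees
    %% the exit via gen_server:call/3 with the 30000 ms timeout in the log.
    try
        delete_vbucket()
    catch
        error:Reason ->
            %% Reason = {badmatch,{error,timeout}}, as in the reports above.
            {server_exit, Reason}
    end.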
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: 
Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, 
{'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 67 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 68 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 71 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 72 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 73 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 74 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 75 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 76 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 77 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 78 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 79 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting 
vbucket 'ns_1@10.2.1.100' in "default" on 80 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 81 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 82 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 83 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 84 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 85 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 86 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 87 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 88 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 89 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 90 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 91 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 92 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 93 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 
13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 94 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 95 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 96 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 97 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 98 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 99 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 100 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 101 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 102 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 103 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 104 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 105 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 106 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting 
vbucket 'ns_1@10.2.1.100' in "default" on 107 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 108 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 109 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 110 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 111 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 112 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 113 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 114 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 115 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 116 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 117 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 118 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 119 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 120 from replica to dead because we don't have all copies INFO REPORT 
<0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 121 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 122 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 123 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 124 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 125 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 126 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 127 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 128 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 129 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 130 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 131 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 132 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 133 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 134 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 135 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 136 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 137 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 138 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 139 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 140 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 141 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 142 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 143 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 144 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 145 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 146 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 147 from replica to dead because we 
don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 148 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 149 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 150 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 151 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 152 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 153 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 154 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 155 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 156 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 157 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 158 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 159 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 160 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 
=============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 161 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.100' in "default" on 162 from replica to dead because we don't have all copies INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 512 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 513 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:38 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 514 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.10955.0> 2011-01-03 13:40:40 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 515 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.85.0> 2011-01-03 13:40:41 =============================================================================== Pulling config from: 'ns_1@10.2.1.101' INFO REPORT <0.10955.0> 2011-01-03 13:40:43 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 516 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:43 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 517 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:43 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 518 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.10955.0> 2011-01-03 13:40:46 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 519 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.110.0> 2011-01-03 13:40:48 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.10955.0>} INFO REPORT <0.10955.0> 2011-01-03 13:40:49 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 520 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:49 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 521 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.10955.0> 2011-01-03 13:40:49 
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 522 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.259.0> 2011-01-03 13:40:51
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 693
auth_errors 0
bucket_conns 4
bytes_read 6447927907
bytes_written 198679524
cas_badval 0
cas_hits 0
cas_misses 0
cmd_flush 0
cmd_get 0
cmd_set 4548601
conn_yields 10588
connection_structures 235
curr_connections 26
curr_items 2825780
curr_items_tot 4875251
daemon_connections 10
decr_hits 0
decr_misses 0
delete_hits 0
delete_misses 0
ep_bg_fetched 0
ep_commit_num 5121
ep_commit_time 0
ep_commit_time_total 1520
ep_data_age 15
ep_data_age_highwat 495
ep_db_cleaner_status complete
ep_db_strategy multiMTDB
ep_dbinit 81
ep_dbname c:/Program Files/Membase/Server/data/ns_1/default
ep_dbshards 4
ep_expired 0
ep_flush_duration 3
ep_flush_duration_highwat 297
ep_flush_duration_total 2012
ep_flush_preempts 0
ep_flusher_state running
ep_flusher_todo 0
ep_io_num_read 803643
ep_io_num_write 4693971
ep_io_read_bytes 1017164475
ep_io_write_bytes 4649399717
ep_item_begin_failed 0
ep_item_commit_failed 0
ep_item_flush_expired 0
ep_item_flush_failed 0
ep_kv_size 2641598165
ep_max_data_size 3426746368
ep_max_txn_size 1000
ep_mem_high_wat 2570059776
ep_mem_low_wat 2056047820
ep_min_data_age 0
ep_num_active_non_resident 645143
ep_num_eject_failures 1825863
ep_num_eject_replicas 881954
ep_num_expiry_pager_runs 0
ep_num_non_resident 1774153
ep_num_not_my_vbuckets 854355
ep_num_pager_runs 7
ep_num_value_ejects 1965184
ep_oom_errors 0
ep_overhead 59263777
ep_pending_ops 0
ep_pending_ops_max 0
ep_pending_ops_max_duration 0
ep_pending_ops_total 0
ep_queue_age_cap 900
ep_queue_size 500000
ep_storage_age 0
ep_storage_age_highwat 492
ep_storage_type featured
ep_store_max_concurrency 10
ep_store_max_readers 9
ep_store_max_readwrite 1
ep_tap_bg_fetch_requeued 0
ep_tap_bg_fetched 797387
ep_tap_keepalive 0
ep_tmp_oom_errors 0
ep_too_old 0
ep_too_young 0
ep_total_cache_size 5310992245
ep_total_del_items 0
ep_total_enqueued 5194331
ep_total_new_items 4688278
ep_total_persisted 4693971
ep_vbucket_del 512
ep_vbucket_del_avg_walltime 118096
ep_vbucket_del_fail 0
ep_vbucket_del_max_walltime 140400
ep_vbucket_del_total_walltime 60465600
ep_version 1.6.2
ep_warmed_up 0
ep_warmup true
ep_warmup_dups 0
ep_warmup_oom 0
ep_warmup_thread complete
ep_warmup_time 31200
get_hits 0
get_misses 0
incr_hits 0
incr_misses 0
libevent 2.0.7-rc
limit_maxbytes 67108864
mem_used 2700861942
pid 2160
pointer_size 64
rejected_conns 0
tap_connect_received 688
tap_mutation_received 2323301
tap_mutation_sent 3579249
tap_opaque_received 1982
tap_opaque_sent 2490
tap_vbucket_set_sent 1367
threads 4
time 1294090851
total_connections 1407
uptime 2865
version 1.4.4_304_g7d5a132

INFO REPORT <0.10955.0> 2011-01-03 13:40:52
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 523 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.10955.0> 2011-01-03 13:40:54
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 524 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:40:54
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 525 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:40:54
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 526 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.10955.0> 2011-01-03 13:40:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 527 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:40:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.10955.0>}

INFO REPORT <0.10955.0> 2011-01-03 13:41:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 528 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 529 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 530 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.10955.0> 2011-01-03 13:41:02
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 531 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.10955.0> 2011-01-03 13:41:05
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 532 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:05
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 533 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:05
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 534 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.93.0> 2011-01-03 13:41:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,90867,404401}},
   {active_buckets,["default"]},
   {memory, [{total,21996808}, {processes,13643372}, {processes_used,13623668}, {system,8353436}, {atom,560301}, {atom_used,557531}, {binary,280168}, {code,4570913}, {ets,1521388}]},
   {cluster_compatibility_version,1},
   {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,2879},
   {memory_data,{4284698624,4181524480,{<0.299.0>,7442428}}},
   {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]},
   {replication,[{"default",0.5}]},
   {system_memory_data, [{total_memory,4284698624}, {free_memory,56274944}, {system_total_memory,4284698624}]},
   {statistics, [{wall_clock,{2870393,0}}, {context_switches,{869190,0}}, {garbage_collection,{155793,1750273882,0}}, {io,{{input,87864019},{output,43286196}}}, {reductions,{702892711,766481}}, {run_queue,0}, {runtime,{40341,15}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,90867,404402}},
   {active_buckets,["default"]},
   {memory, [{total,18575968}, {processes,10240324}, {processes_used,10230260}, {system,8335644}, {atom,559813}, {atom_used,556363}, {binary,309472}, {code,4551541}, {ets,1501948}]},
   {cluster_compatibility_version,1},
   {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,2809},
   {memory_data,{4284698624,4225490944,{<11993.387.0>,4599840}}},
   {disk_data, [{"C:\\",46243100,46},{"D:\\",51809624,0},{"G:\\",33929248,18}]},
   {replication,[{"default",0.5}]},
   {system_memory_data, [{total_memory,4284698624}, {free_memory,33730560}, {system_total_memory,4284698624}]},
   {statistics, [{wall_clock,{2803030,2136}}, {context_switches,{502581,0}}, {garbage_collection,{116006,1090344881,0}}, {io,{{input,66958716},{output,33947094}}}, {reductions,{334558489,823971}}, {run_queue,1}, {runtime,{25287,62}}]}]},
 {'ns_1@10.2.1.102',
  [{last_heard,{1294,90868,168400}},
   {active_buckets,["default"]},
   {memory, [{total,15129192}, {processes,7472692}, {processes_used,7459724}, {system,7656500}, {atom,541077}, {atom_used,528868}, {binary,320128}, {code,4280811}, {ets,1102140}]},
   {cluster_compatibility_version,1},
   {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1798},
   {memory_data,{4284698624,3194523648,{<10870.307.0>,3328596}}},
   {disk_data, [{"C:\\",49423972,41},{"D:\\",52797620,0},{"G:\\",34724465,17}]},
   {replication,[{"default",0.0}]},
   {system_memory_data, [{total_memory,4284698624}, {free_memory,949645312}, {system_total_memory,4284698624}]},
   {statistics, [{wall_clock,{1790314,0}}, {context_switches,{181690,0}}, {garbage_collection,{51277,256786371,0}}, {io,{{input,19398135},{output,16300680}}}, {reductions,{103977239,594107}}, {run_queue,0}, {runtime,{9859,125}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:41:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.10955.0>}

INFO REPORT <0.10955.0> 2011-01-03 13:41:09
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 535 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.10955.0> 2011-01-03 13:41:11
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 536 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:11
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 537 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:11
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 538 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.85.0> 2011-01-03 13:41:13
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.10955.0> 2011-01-03 13:41:14
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 539 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.10955.0> 2011-01-03 13:41:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 540 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 541 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 542 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:41:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.10955.0>}

INFO REPORT <0.10955.0> 2011-01-03 13:41:19
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 543 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.10955.0> 2011-01-03 13:41:22
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 544 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:22
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 545 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:22
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 546 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.85.0> 2011-01-03 13:41:25
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.10955.0> 2011-01-03 13:41:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 547 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:41:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.10955.0>}

INFO REPORT <0.10955.0> 2011-01-03 13:41:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 548 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 549 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 550 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.10955.0> 2011-01-03 13:41:31
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 551 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.10955.0> 2011-01-03 13:41:34
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 552 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:34
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 553 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.10955.0> 2011-01-03 13:41:34
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 554 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:41:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.10955.0>}

CRASH REPORT <0.10955.0> 2011-01-03 13:41:39
===============================================================================
Crashing process
  initial_call {ns_janitor,cleanup,['Argument__1']}
  pid <0.10955.0>
  registered_name []
  error_info {exit,{{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,554}, 30000]}}, [{gen_server,call,3}, {lists,foreach,2}, {ns_janitor,do_sanify_chain,5}, {ns_janitor,sanify_chain,5}, {ns_janitor,'-sanify/3-lc$^0/1-0-',4}, {ns_janitor,'-sanify/3-lc$^0/1-0-',4}, {ns_janitor,cleanup,1}, {proc_lib,init_p_do_apply,3}]}
  ancestors [<0.110.0>,ns_server_sup,ns_server_cluster_sup,<0.60.0>]
  messages []
  links [<0.110.0>]
  dictionary []
  trap_exit false
  status running
  heap_size 6765
  stack_size 24
  reductions 1348913

INFO REPORT <0.110.0> 2011-01-03 13:41:39
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default" with reason {{{badmatch, {error, timeout}}, [{mc_client_binary, cmd_binary_vocal_recv, 5}, {mc_client_binary, delete_vbucket, 2}, {ns_memcached, handle_call, 3}, {gen_server, handle_msg, 5}, {proc_lib, init_p_do_apply, 3}]}, {gen_server, call, [{'ns_memcached-default', 'ns_1@10.2.1.101'}, {delete_vbucket, 554}, 30000]}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 67 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 68 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 512 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 513 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 516 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 517 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 520 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 521 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 524 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 525 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 528 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 529 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 532 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 533 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 536 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 537 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 540 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 541 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 544 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 545 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 548 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 549 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 552 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 553 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:40
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 555 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:41:44
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 556 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:44
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 557 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:44
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 558 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:41:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 559 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:41:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11110.0>}

INFO REPORT <0.11110.0> 2011-01-03 13:41:51
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 560 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:51
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 561 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:51
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 562 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:41:53
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 563 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:41:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 564 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 565 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:41:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 566 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:41:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11110.0>}

INFO REPORT <0.11110.0> 2011-01-03 13:41:59
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 567 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.85.0> 2011-01-03 13:42:01
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.11110.0> 2011-01-03 13:42:01
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 568 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:01
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 569 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:01
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 570 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:42:04
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 571 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:42:06
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 572 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:06
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 573 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:06
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 574 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.93.0> 2011-01-03 13:42:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,90927,402401}},
   {active_buckets,["default"]},
   {memory, [{total,22770552}, {processes,14482956}, {processes_used,14463252}, {system,8287596}, {atom,560301}, {atom_used,557531}, {binary,179096}, {code,4570913}, {ets,1556820}]},
   {cluster_compatibility_version,1},
   {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,2939},
   {memory_data,{4284698624,4236230656,{<0.299.0>,6656756}}},
   {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]},
   {replication,[{"default",0.5}]},
   {system_memory_data, [{total_memory,4284698624}, {free_memory,71270400}, {system_total_memory,4284698624}]},
   {statistics, [{wall_clock,{2930391,0}}, {context_switches,{881838,0}}, {garbage_collection,{158219,1776613143,0}}, {io,{{input,90191926},{output,45336550}}}, {reductions,{713589105,794622}}, {run_queue,0}, {runtime,{40965,0}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,90927,402402}},
   {active_buckets,["default"]},
   {memory, [{total,25452000}, {processes,16799876}, {processes_used,16790916}, {system,8652124}, {atom,559813}, {atom_used,556363}, {binary,629560}, {code,4551541}, {ets,1497612}]},
   {cluster_compatibility_version,1},
   {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,2869},
   {memory_data,{4284698624,4249595904,{<11993.387.0>,4599840}}},
   {disk_data, [{"C:\\",46243100,46},{"D:\\",51809624,0},{"G:\\",33929248,18}]},
   {replication,[{"default",0.5}]},
   {system_memory_data, [{total_memory,4284698624}, {free_memory,75603968}, {system_total_memory,4284698624}]},
   {statistics, [{wall_clock,{2861594,702}}, {context_switches,{512010,0}}, {garbage_collection,{117629,1111240470,0}}, {io,{{input,68471685},{output,34272861}}}, {reductions,{342634360,653938}}, {run_queue,0}, {runtime,{26083,78}}]}]},
 {'ns_1@10.2.1.102',
  [{last_heard,{1294,90928,166400}},
   {active_buckets,["default"]},
   {memory, [{total,16427504}, {processes,8742588}, {processes_used,8729620}, {system,7684916}, {atom,541077}, {atom_used,528868}, {binary,311592}, {code,4280811}, {ets,1139308}]},
   {cluster_compatibility_version,1},
   {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1858},
   {memory_data,{4284698624,3345494016,{<10870.307.0>,3328596}}},
   {disk_data, [{"C:\\",49423972,41},{"D:\\",52797620,0},{"G:\\",34724465,17}]},
   {replication,[{"default",0.0}]},
   {system_memory_data, [{total_memory,4284698624}, {free_memory,919609344}, {system_total_memory,4284698624}]},
   {statistics, [{wall_clock,{1850312,0}}, {context_switches,{187929,0}}, {garbage_collection,{52993,266123907,0}}, {io,{{input,20400709},{output,17641378}}}, {reductions,{107781561,617825}}, {run_queue,0}, {runtime,{10467,62}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:42:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11110.0>}

INFO REPORT <0.11110.0> 2011-01-03 13:42:08
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 575 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:42:12
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 576 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:12
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 577 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:12
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 578 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:42:14
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 579 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:42:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 580 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 581 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 582 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:42:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11110.0>}

INFO REPORT <0.11110.0> 2011-01-03 13:42:19
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 583 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:42:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 584 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 585 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 586 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:42:25
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 587 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:42:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 588 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 589 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 590 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:42:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11110.0>}

INFO REPORT <0.259.0> 2011-01-03 13:42:31
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 693
auth_errors 0
bucket_conns 4
bytes_read 6543117028
bytes_written 204521310
cas_badval 0
cas_hits 0
cas_misses 0
cmd_flush 0
cmd_get 0
cmd_set 4548601
conn_yields 12832
connection_structures 235
curr_connections 26
curr_items 2825780
curr_items_tot 4875251
daemon_connections 10
decr_hits 0
decr_misses 0
delete_hits 0
delete_misses 0
ep_bg_fetched 0
ep_commit_num 5121
ep_commit_time 0
ep_commit_time_total 1520
ep_data_age 15
ep_data_age_highwat 495
ep_db_cleaner_status complete
ep_db_strategy multiMTDB
ep_dbinit 81
ep_dbname c:/Program Files/Membase/Server/data/ns_1/default
ep_dbshards 4
ep_expired 0
ep_flush_duration 3
ep_flush_duration_highwat 297
ep_flush_duration_total 2012
ep_flush_preempts 0
ep_flusher_state running
ep_flusher_todo 0
ep_io_num_read 803643
ep_io_num_write 4693971
ep_io_read_bytes 1017164475
ep_io_write_bytes 4649399717
ep_item_begin_failed 0
ep_item_commit_failed 0
ep_item_flush_expired 0
ep_item_flush_failed 0
ep_kv_size 2440086821
ep_max_data_size 3426746368
ep_max_txn_size 1000
ep_mem_high_wat 2570059776
ep_mem_low_wat 2056047820
ep_min_data_age 0
ep_num_active_non_resident 855282
ep_num_eject_failures 2079605
ep_num_eject_replicas 890274
ep_num_expiry_pager_runs 0
ep_num_non_resident 2058973
ep_num_not_my_vbuckets 854355
ep_num_pager_runs 7
ep_num_value_ejects 2250004
ep_oom_errors 0
ep_overhead 59263777
ep_pending_ops 0
ep_pending_ops_max 0
ep_pending_ops_max_duration 0
ep_pending_ops_total 0
ep_queue_age_cap 900
ep_queue_size 500000
ep_storage_age 0
ep_storage_age_highwat 492
ep_storage_type featured
ep_store_max_concurrency 10
ep_store_max_readers 9
ep_store_max_readwrite 1
ep_tap_bg_fetch_requeued 0
ep_tap_bg_fetched 797387
ep_tap_keepalive 0
ep_tmp_oom_errors 0
ep_too_old 0
ep_too_young 0
ep_total_cache_size 5310992245
ep_total_del_items 0
ep_total_enqueued 5194331
ep_total_new_items 4688278
ep_total_persisted 4693971
ep_vbucket_del 512
ep_vbucket_del_avg_walltime 118096
ep_vbucket_del_fail 0
ep_vbucket_del_max_walltime 140400
ep_vbucket_del_total_walltime 60465600
ep_version 1.6.2
ep_warmed_up 0
ep_warmup true
ep_warmup_dups 0
ep_warmup_oom 0
ep_warmup_thread complete
ep_warmup_time 31200
get_hits 0
get_misses 0
incr_hits 0
incr_misses 0
libevent 2.0.7-rc
limit_maxbytes 67108864
mem_used 2499350598
pid 2160
pointer_size 64
rejected_conns 0
tap_connect_received 688
tap_mutation_received 2439331
tap_mutation_sent 3579249
tap_opaque_received 1982
tap_opaque_sent 2490
tap_vbucket_set_sent 1367
threads 4
time 1294090951
total_connections 1407
uptime 2965
version 1.4.4_304_g7d5a132

INFO REPORT <0.11110.0> 2011-01-03 13:42:31
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 591 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:42:34
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 592 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:34
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 593 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:34
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 594 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.85.0> 2011-01-03 13:42:36
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.11110.0> 2011-01-03 13:42:37
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 595 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:42:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11110.0>}

INFO REPORT <0.11110.0> 2011-01-03 13:42:41
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 596 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:41
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 597 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11110.0> 2011-01-03 13:42:41
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 598 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11110.0> 2011-01-03 13:42:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 599 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.11110.0> 2011-01-03 13:42:47 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 600 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11110.0> 2011-01-03 13:42:47 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 601 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11110.0> 2011-01-03 13:42:47 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 602 in "default" on 'ns_1@10.2.1.101' INFO REPORT <0.110.0> 2011-01-03 13:42:48 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11110.0>} INFO REPORT <0.11110.0> 2011-01-03 13:42:51 =============================================================================== ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 603 in "default" on 'ns_1@10.2.1.101' CRASH REPORT <0.11110.0> 2011-01-03 13:42:56 =============================================================================== Crashing process initial_call {ns_janitor,cleanup,['Argument__1']} pid <0.11110.0> registered_name [] error_info {exit,{{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,603}, 30000]}}, [{gen_server,call,3}, {lists,foreach,2}, {ns_janitor,do_sanify_chain,5}, {ns_janitor,sanify_chain,5}, {ns_janitor,'-sanify/3-lc$^0/1-0-',4}, {ns_janitor,'-sanify/3-lc$^0/1-0-',4}, {ns_janitor,cleanup,1}, {proc_lib,init_p_do_apply,3}]} ancestors [<0.110.0>,ns_server_sup,ns_server_cluster_sup,<0.60.0>] messages [] links [<0.110.0>] dictionary [] trap_exit false status running heap_size 4181 stack_size 24 reductions 1453597 INFO REPORT <0.110.0> 2011-01-03 13:42:56 =============================================================================== ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default" with reason {{{badmatch, {error, timeout}}, [{mc_client_binary, cmd_binary_vocal_recv, 5}, {mc_client_binary, delete_vbucket, 2}, {ns_memcached, handle_call, 3}, {gen_server, handle_msg, 5}, {proc_lib, init_p_do_apply, 3}]}, {gen_server, call, [{'ns_memcached-default', 'ns_1@10.2.1.101'}, {delete_vbucket, 603}, 30000]}} INFO REPORT <0.11276.0> 2011-01-03 13:42:57 =============================================================================== ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101'] INFO REPORT <0.11276.0> 2011-01-03 13:42:57 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11276.0> 2011-01-03 13:42:57 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of 
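Both crash reports in this capture have the same shape: the janitor loops over vbuckets (the lists:foreach frame in the stack trace) and issues a gen_server:call to the 'ns_memcached-default' process on 'ns_1@10.2.1.101' with a 30000 ms timeout; inside that server, mc_client_binary:cmd_binary_vocal_recv/5 gets {error,timeout} from the memcached socket, the badmatch kills the server mid-call, and gen_server:call in the janitor re-exits with the server's crash reason, aborting the whole cleanup at vbucket 603 (and later at 614). A minimal sketch of the caller side of that pattern, with the hypothetical wrapper name delete_dead_vbuckets/2 (only gen_server:call and its semantics are taken from the log):

%% Hypothetical sketch of the janitor-side call that crashed above.
delete_dead_vbuckets(Node, VBuckets) ->
    lists:foreach(
      fun(V) ->
              %% If the server crashes while handling the request
              %% (here: a badmatch on {error,timeout} from the
              %% memcached socket), this call exits with the server's
              %% crash reason -- the exit shown in the CRASH REPORT --
              %% taking the janitor process down with it.
              gen_server:call({'ns_memcached-default', Node},
                              {delete_vbucket, V},
                              30000)
      end, VBuckets).

Wrapping the call in try ... catch exit:Reason -> ... end would let a caller log the failure and continue with the remaining vbuckets; as the log stands, each crash is instead followed by a fresh janitor pid restarting the pass from vbucket 0.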
INFO REPORT <0.11276.0> 2011-01-03 13:42:57
ns_1@10.2.1.100:ns_janitor:193: Killing replicators (one report per vbucket, identical apart from the vbucket id) for vbuckets 0-52, 55, 56, 59, 60, 63, 64, 67, 68, 512, 513, 516, 517, 520, 521, 524, 525, 528, 529, 532, 533, 536, 537, 540, 541, 544, 545, 548, 549, 552, 553, 556, 557, 560, 561, 564, 565, 568, 569, 572, 573, 576, 577, 580, 581, 584, 585, 588, 589, 592, 593, 596, 597, 600, 601, 604 and 605 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11276.0> 2011-01-03 13:42:57
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 606 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:42:58
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11276.0>}

INFO REPORT <0.11276.0> 2011-01-03 13:43:00
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 607 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11276.0> 2011-01-03 13:43:04
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 608 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
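The recurring "Skipping janitor in state janitor_running" reports come from the orchestrator declining to start a second cleanup while the {janitor_state, Buckets, Pid} it is already tracking is still alive. A minimal sketch of that guard as a plain gen_server, with every name hypothetical (the real ns_orchestrator is a larger state machine):

-module(janitor_guard).
-behaviour(gen_server).
-export([start_link/0, request_cleanup/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
         terminate/2, code_change/3]).

start_link() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).

%% Ask for a janitor run on Bucket; refused while one is in flight.
request_cleanup(Bucket) ->
    gen_server:call(?MODULE, {cleanup, Bucket}).

init([]) ->
    process_flag(trap_exit, true),   % turn worker crashes into messages
    {ok, idle}.

handle_call({cleanup, Bucket}, _From, idle) ->
    Pid = spawn_link(fun() -> cleanup(Bucket) end),
    {reply, started, {janitor_state, [Bucket], Pid}};
handle_call({cleanup, _Bucket}, _From, {janitor_state, _, _} = State) ->
    %% Mirrors "Skipping janitor in state janitor_running: {janitor_state, ...}"
    error_logger:info_msg("Skipping janitor in state janitor_running: ~p~n",
                          [State]),
    {reply, skipped, State}.

handle_info({'EXIT', Pid, _Reason}, {janitor_state, _, Pid}) ->
    {noreply, idle};                 % worker finished or crashed; allow a new run
handle_info(_Msg, State) ->
    {noreply, State}.

handle_cast(_Msg, State) -> {noreply, State}.
terminate(_Reason, _State) -> ok.
code_change(_OldVsn, State, _Extra) -> {ok, State}.

cleanup(_Bucket) ->
    ok.  % the real per-bucket cleanup would go here

Because the guard only returns to idle on the worker's 'EXIT', a crashed run (like the two above) frees the slot, which is why a new janitor pid appears within a second of each crash report.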
INFO REPORT <0.11276.0> 2011-01-03 13:43:04
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 609 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11276.0> 2011-01-03 13:43:04
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 610 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.11276.0> 2011-01-03 13:43:06
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 611 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.93.0> 2011-01-03 13:43:08
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,90987,400401}},
   {active_buckets,["default"]},
   {memory,[{total,23047992},{processes,14709340},{processes_used,14690548},
            {system,8338652},{atom,560301},{atom_used,557531},{binary,195120},
            {code,4570913},{ets,1591036}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},{kernel,"2.13.4"},
             {sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},
             {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,2999},
   {memory_data,{4284698624,4220108800,{<0.299.0>,6656756}}},
   {disk_data,[{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]},
   {replication,[{"default",0.5}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,30355456},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{2990405,0}},{context_switches,{895522,0}},
                {garbage_collection,{160682,1803916095,0}},
                {io,{{input,91835377},{output,45822293}}},
                {reductions,{725191088,2743568}},{run_queue,0},
                {runtime,{41761,187}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,90987,400402}},
   {active_buckets,["default"]},
   {memory,[{total,14136952},{processes,5855916},{processes_used,5845852},
            {system,8281036},{atom,559813},{atom_used,556363},{binary,229328},
            {code,4551541},{ets,1527108}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},{kernel,"2.13.4"},
             {sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},
             {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,2929},
   {memory_data,{4284698624,4251910144,{<11993.387.0>,5385512}}},
   {disk_data,[{"C:\\",46243100,46},{"D:\\",51809624,0},{"G:\\",33929248,18}]},
   {replication,[{"default",0.5}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,30322688},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{2923041,2151}},{context_switches,{521833,0}},
                {garbage_collection,{119280,1133847888,0}},
                {io,{{input,70024669},{output,34594466}}},
                {reductions,{351239525,411215}},{run_queue,0},
                {runtime,{26956,31}}]}]},
 {'ns_1@10.2.1.102',
  [{last_heard,{1294,90988,164400}},
   {active_buckets,["default"]},
   {memory,[{total,15101464},{processes,7380340},{processes_used,7367372},
            {system,7721124},{atom,541077},{atom_used,528868},{binary,313848},
            {code,4280811},{ets,1173260}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{kernel,"2.13.4"},
             {sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},
             {stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,1918},
   {memory_data,{4284698624,3356319744,{<10870.307.0>,4114268}}},
   {disk_data,[{"C:\\",49423972,41},{"D:\\",52797620,0},{"G:\\",34724465,17}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,926334976},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{1910326,0}},{context_switches,{193786,0}},
                {garbage_collection,{54641,275061842,0}},
                {io,{{input,20676948},{output,17920546}}},
                {reductions,{111374164,610042}},{run_queue,0},
                {runtime,{10888,15}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:43:08
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11276.0>}

INFO REPORT <0.11276.0> 2011-01-03 13:43:10
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 612 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11276.0> 2011-01-03 13:43:10
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 613 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11276.0> 2011-01-03 13:43:10
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 614 in "default" on 'ns_1@10.2.1.101'

INFO REPORT <0.85.0> 2011-01-03 13:43:13
Pulling config from: 'ns_1@10.2.1.101'

CRASH REPORT <0.11276.0> 2011-01-03 13:43:15
Crashing process
  initial_call    {ns_janitor,cleanup,['Argument__1']}
  pid             <0.11276.0>
  registered_name []
  error_info      {exit,{{{badmatch,{error,timeout}},
                          [{mc_client_binary,cmd_binary_vocal_recv,5},
                           {mc_client_binary,delete_vbucket,2},
                           {ns_memcached,handle_call,3},
                           {gen_server,handle_msg,5},
                           {proc_lib,init_p_do_apply,3}]},
                         {gen_server,call,
                          [{'ns_memcached-default','ns_1@10.2.1.101'},
                           {delete_vbucket,614},
                           30000]}},
                   [{gen_server,call,3},
                    {lists,foreach,2},
                    {ns_janitor,do_sanify_chain,5},
                    {ns_janitor,sanify_chain,5},
                    {ns_janitor,'-sanify/3-lc$^0/1-0-',4},
                    {ns_janitor,'-sanify/3-lc$^0/1-0-',4},
                    {ns_janitor,cleanup,1},
                    {proc_lib,init_p_do_apply,3}]}
  ancestors       [<0.110.0>,ns_server_sup,ns_server_cluster_sup,<0.60.0>]
  messages        []
  links           [<0.110.0>]
  dictionary      []
  trap_exit       false
  status          running
  heap_size       6765
  stack_size      24
  reductions      1463366

INFO REPORT <0.110.0> 2011-01-03 13:43:15
ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default" with reason
{{{badmatch,{error,timeout}},
  [{mc_client_binary,cmd_binary_vocal_recv,5},
   {mc_client_binary,delete_vbucket,2},
   {ns_memcached,handle_call,3},
   {gen_server,handle_msg,5},
   {proc_lib,init_p_do_apply,3}]},
 {gen_server,call,
  [{'ns_memcached-default','ns_1@10.2.1.101'},{delete_vbucket,614},30000]}}

INFO REPORT <0.11327.0> 2011-01-03 13:43:16
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']

INFO REPORT <0.11327.0> 2011-01-03 13:43:17
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of 
{{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11327.0> 2011-01-03 13:43:17 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 67 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 68 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 512 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 513 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 516 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 517 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 520 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 521 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 524 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 525 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 528 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 529 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 532 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 533 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 536 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 537 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 540 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 541 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 544 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 545 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 548 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 549 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 552 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 553 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 556 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 557 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 560 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 561 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 564 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 565 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 568 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 569 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 572 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 573 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 576 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 577 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 580 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 581 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 584 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 585 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 588 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 589 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 592 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 593 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 596 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 597 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 600 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 601 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 604 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 605 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 608 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 609 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 612 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 613 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:17
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 615 in "default" on 'ns_1@10.2.1.101'
INFO REPORT <0.110.0> 2011-01-03 13:43:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11327.0>}
INFO REPORT <0.11327.0> 2011-01-03 13:43:19
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 616 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:19
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 617 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:19
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 618 in "default" on 'ns_1@10.2.1.101'
INFO REPORT <0.11327.0> 2011-01-03 13:43:23
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 619 in "default" on 'ns_1@10.2.1.101'
INFO REPORT <0.11327.0> 2011-01-03 13:43:26
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 620 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:26
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 621 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:26
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 622 in "default" on 'ns_1@10.2.1.101'
INFO REPORT <0.110.0> 2011-01-03 13:43:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11327.0>}
INFO REPORT <0.11327.0> 2011-01-03 13:43:29
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 623 in "default" on 'ns_1@10.2.1.101'
INFO REPORT <0.11327.0> 2011-01-03 13:43:31
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 624 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:31
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 625 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:31
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 626 in "default" on 'ns_1@10.2.1.101'
INFO REPORT <0.11327.0> 2011-01-03 13:43:34
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 627 in "default" on 'ns_1@10.2.1.101'
INFO REPORT <0.11327.0> 2011-01-03 13:43:36
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 628 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:36
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 629 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:36
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 630 in "default" on 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:43:36
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs
INFO REPORT <0.110.0> 2011-01-03 13:43:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11327.0>}
INFO REPORT <0.105.0> 2011-01-03 13:43:38
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs
INFO REPORT <0.11327.0> 2011-01-03 13:43:39
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 631 in "default" on 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:43:40
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs
INFO REPORT <0.11327.0> 2011-01-03 13:43:41
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 632 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11327.0> 2011-01-03 13:43:41
===============================================================================
ns_1@10.2.1.100:ns_janitor:205: Deleting dead vbucket 634 in "default" on 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:43:42
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 635 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 636 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 637 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 638 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 639 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 640 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 641 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 642 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 643 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 644 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 645 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 646 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 647 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 648 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 649 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 650 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 651 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 652 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 653 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 654 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 655 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 656 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 657 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 658 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 659 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 660 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 661 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 662 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 663 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 664 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 665 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 666 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 667 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 668 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 669 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 670 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 671 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 672 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 673 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 674 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 675 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 676 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 677 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 678 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 679 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 680 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 681 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_janitor:219: Setting vbucket 'ns_1@10.2.1.101' in "default" on 682 from replica to dead because we don't have all copies
INFO REPORT <0.11327.0> 2011-01-03 13:43:43
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:180: Killing replicator {child_id, [1023,1022,1021,1020,1019,1018,1017,1016,1015,1014,1013,1012,1011,1010,1009,1008,1007,1006,1005,1004,1003,1002,1001,1000,999,998,997,996,995,994,993,992,991,990,989,988,987,986,985,984,983,982,981,980,979,978,977,976,975,974,973,972,971,970,969,968,967,966,965,964,963,962,961,960,959,958,957,956,955,954,953,952,951,950,949,948,947,946,945,944,943,942,941,940,939,938,937,936,935,934,933,932,931,930,929,928,927,926,925,924,923,922,921,920,919,918,917,916,915,914,913,912,911,910,909,908,907,906,905,904,903,902,901,900,899,898,897,896,895,894,893,892,891,890,889,888,887,886,885,884,883,882,881,880,879,878,877,876,875,874,873,872,871,870,869,868,867,866,865,864,863,862,861,860,859,858,857,856,855,854,853,852,851,850,849,848,847,846,845,844,843,842,841,840,839,838,837,836,835,834,833,832,831,830,829,828,827,826,825,824,823,822,821,820,819,818,817,816,815,814,813,812,811,810,809,808,807,806,805,804,803,802,801,800,799,798,797,796,795,794,793,792,791,790,789,788,787,786,785,784,783,782,781,780,779,778,777,776,775,774,773,772,771,770,769,768,767,766,765,764,763,762,761,760,759,758,757,756,755,754,753,752,751,750,749,748,747,746,745,744,743,742,741,740,739,738,737,736,735,734,733,732,731,730,729,728,727,726,725,724,723,722,721,720,719,718,717,716,715,714,713,712,711,710,709,708,707,706,705,704,703,702,701,700,699,698,697,696,695,694,693,692,691,690,689,688,687,686,685,684,683,682,681,680,679,678,677,676,675,674,673,672,671,670,669,668,667,666,665,664,663,662,661,660,659,658,657,656,655,654,653,652,651,650,649,648,647,646,645,644,643,642,641,640,639,638,637,636,635], 'ns_1@10.2.1.101'} on node 'ns_1@10.2.1.100'
SUPERVISOR REPORT <0.260.0> 2011-01-03 13:43:43
===============================================================================
Reporting supervisor {local,'ns_vbm_sup-default'}
Child process errorContext
shutdown_error reason shutdown pid <0.10700.0> name {child_id,[1023,1022,1021,1020,1019,1018,1017,1016,1015,1014,1013, 1012,1011,1010,1009,1008,1007,1006,1005,1004,1003,1002, 1001,1000,999,998,997,996,995,994,993,992,991,990,989,988, 987,986,985,984,983,982,981,980,979,978,977,976,975,974, 973,972,971,970,969,968,967,966,965,964,963,962,961,960, 959,958,957,956,955,954,953,952,951,950,949,948,947,946, 945,944,943,942,941,940,939,938,937,936,935,934,933,932, 931,930,929,928,927,926,925,924,923,922,921,920,919,918, 917,916,915,914,913,912,911,910,909,908,907,906,905,904, 903,902,901,900,899,898,897,896,895,894,893,892,891,890, 889,888,887,886,885,884,883,882,881,880,879,878,877,876, 875,874,873,872,871,870,869,868,867,866,865,864,863,862, 861,860,859,858,857,856,855,854,853,852,851,850,849,848, 847,846,845,844,843,842,841,840,839,838,837,836,835,834, 833,832,831,830,829,828,827,826,825,824,823,822,821,820, 819,818,817,816,815,814,813,812,811,810,809,808,807,806, 805,804,803,802,801,800,799,798,797,796,795,794,793,792, 791,790,789,788,787,786,785,784,783,782,781,780,779,778, 777,776,775,774,773,772,771,770,769,768,767,766,765,764, 763,762,761,760,759,758,757,756,755,754,753,752,751,750, 749,748,747,746,745,744,743,742,741,740,739,738,737,736, 735,734,733,732,731,730,729,728,727,726,725,724,723,722, 721,720,719,718,717,716,715,714,713,712,711,710,709,708, 707,706,705,704,703,702,701,700,699,698,697,696,695,694, 693,692,691,690,689,688,687,686,685,684,683,682,681,680, 679,678,677,676,675,674,673,672,671,670,669,668,667,666, 665,664,663,662,661,660,659,658,657,656,655,654,653,652, 651,650,649,648,647,646,645,644,643,642,641,640,639,638, 637,636,635], 'ns_1@10.2.1.101'} start_function {ns_port_server,start_link, [vbucketmigrator, "./bin/vbucketmigrator/vbucketmigrator", ["-e","-a","default","-h","10.2.1.100:11210","-d", "10.2.1.101:11210","-A","-v","-b","1023","-b", "1022","-b","1021","-b","1020","-b","1019","-b", "1018","-b","1017","-b","1016","-b","1015","-b", "1014","-b","1013","-b","1012","-b","1011","-b", "1010","-b","1009","-b","1008","-b","1007","-b", "1006","-b","1005","-b","1004","-b","1003","-b", "1002","-b","1001","-b","1000","-b","999","-b", "998","-b","997","-b","996","-b","995","-b","994", "-b","993","-b","992","-b","991","-b","990","-b", "989","-b","988","-b","987","-b","986","-b","985", "-b","984","-b","983","-b","982","-b","981","-b", "980","-b","979","-b","978","-b","977","-b","976", "-b","975","-b","974","-b","973","-b","972","-b", "971","-b","970","-b","969","-b","968","-b","967", "-b","966","-b","965","-b","964","-b","963","-b", "962","-b","961","-b","960","-b","959","-b","958", "-b","957","-b","956","-b","955","-b","954","-b", "953","-b","952","-b","951","-b","950","-b","949", "-b","948","-b","947","-b","946","-b","945","-b", "944","-b","943","-b","942","-b","941","-b","940", "-b","939","-b","938","-b","937","-b","936","-b", "935","-b","934","-b","933","-b","932","-b","931", "-b","930","-b","929","-b","928","-b","927","-b", "926","-b","925","-b","924","-b","923","-b","922", "-b","921","-b","920","-b","919","-b","918","-b", "917","-b","916","-b","915","-b","914","-b","913", "-b","912","-b","911","-b","910","-b","909","-b", "908","-b","907","-b","906","-b","905","-b","904", "-b","903","-b","902","-b","901","-b","900","-b", "899","-b","898","-b","897","-b","896","-b","895", "-b","894","-b","893","-b","892","-b","891","-b", "890","-b","889","-b","888","-b","887","-b","886", "-b","885","-b","884","-b","883","-b","882","-b", "881","-b","880","-b","879","-b","878","-b","877", 
"-b","876","-b","875","-b","874","-b","873","-b", "872","-b","871","-b","870","-b","869","-b","868", "-b","867","-b","866","-b","865","-b","864","-b", "863","-b","862","-b","861","-b","860","-b","859", "-b","858","-b","857","-b","856","-b","855","-b", "854","-b","853","-b","852","-b","851","-b","850", "-b","849","-b","848","-b","847","-b","846","-b", "845","-b","844","-b","843","-b","842","-b","841", "-b","840","-b","839","-b","838","-b","837","-b", "836","-b","835","-b","834","-b","833","-b","832", "-b","831","-b","830","-b","829","-b","828","-b", "827","-b","826","-b","825","-b","824","-b","823", "-b","822","-b","821","-b","820","-b","819","-b", "818","-b","817","-b","816","-b","815","-b","814", "-b","813","-b","812","-b","811","-b","810","-b", "809","-b","808","-b","807","-b","806","-b","805", "-b","804","-b","803","-b","802","-b","801","-b", "800","-b","799","-b","798","-b","797","-b","796", "-b","795","-b","794","-b","793","-b","792","-b", "791","-b","790","-b","789","-b","788","-b","787", "-b","786","-b","785","-b","784","-b","783","-b", "782","-b","781","-b","780","-b","779","-b","778", "-b","777","-b","776","-b","775","-b","774","-b", "773","-b","772","-b","771","-b","770","-b","769", "-b","768","-b","767","-b","766","-b","765","-b", "764","-b","763","-b","762","-b","761","-b","760", "-b","759","-b","758","-b","757","-b","756","-b", "755","-b","754","-b","753","-b","752","-b","751", "-b","750","-b","749","-b","748","-b","747","-b", "746","-b","745","-b","744","-b","743","-b","742", "-b","741","-b","740","-b","739","-b","738","-b", "737","-b","736","-b","735","-b","734","-b","733", "-b","732","-b","731","-b","730","-b","729","-b", "728","-b","727","-b","726","-b","725","-b","724", "-b","723","-b","722","-b","721","-b","720","-b", "719","-b","718","-b","717","-b","716","-b","715", "-b","714","-b","713","-b","712","-b","711","-b", "710","-b","709","-b","708","-b","707","-b","706", "-b","705","-b","704","-b","703","-b","702","-b", "701","-b","700","-b","699","-b","698","-b","697", "-b","696","-b","695","-b","694","-b","693","-b", "692","-b","691","-b","690","-b","689","-b","688", "-b","687","-b","686","-b","685","-b","684","-b", "683","-b","682","-b","681","-b","680","-b","679", "-b","678","-b","677","-b","676","-b","675","-b", "674","-b","673","-b","672","-b","671","-b","670", "-b","669","-b","668","-b","667","-b","666","-b", "665","-b","664","-b","663","-b","662","-b","661", "-b","660","-b","659","-b","658","-b","657","-b", "656","-b","655","-b","654","-b","653","-b","652", "-b","651","-b","650","-b","649","-b","648","-b", "647","-b","646","-b","645","-b","644","-b","643", "-b","642","-b","641","-b","640","-b","639","-b", "638","-b","637","-b","636","-b","635"], [use_stdio,stderr_to_stdout, {write_data,[[],"\n"]}]]} restart_type permanent shutdown 10 child_type worker INFO REPORT <0.11327.0> 2011-01-03 13:43:43 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets 
[1023,1022,1021,1020,1019,1018,1017,1016,1015,1014,1013,1012,1011,1010,1009,1008,1007,1006,1005,1004,1003,1002,1001,1000,999,998,997,996,995,994,993,992,991,990,989,988,987,986,985,984,983,982,981,980,979,978,977,976,975,974,973,972,971,970,969,968,967,966,965,964,963,962,961,960,959,958,957,956,955,954,953,952,951,950,949,948,947,946,945,944,943,942,941,940,939,938,937,936,935,934,933,932,931,930,929,928,927,926,925,924,923,922,921,920,919,918,917,916,915,914,913,912,911,910,909,908,907,906,905,904,903,902,901,900,899,898,897,896,895,894,893,892,891,890,889,888,887,886,885,884,883,882,881,880,879,878,877,876,875,874,873,872,871,870,869,868,867,866,865,864,863,862,861,860,859,858,857,856,855,854,853,852,851,850,849,848,847,846,845,844,843,842,841,840,839,838,837,836,835,834,833,832,831,830,829,828,827,826,825,824,823,822,821,820,819,818,817,816,815,814,813,812,811,810,809,808,807,806,805,804,803,802,801,800,799,798,797,796,795,794,793,792,791,790,789,788,787,786,785,784,783,782,781,780,779,778,777,776,775,774,773,772,771,770,769,768,767,766,765,764,763,762,761,760,759,758,757,756,755,754,753,752,751,750,749,748,747,746,745,744,743,742,741,740,739,738,737,736,735,734,733,732,731,730,729,728,727,726,725,724,723,722,721,720,719,718,717,716,715,714,713,712,711,710,709,708,707,706,705,704,703,702,701,700,699,698,697,696,695,694,693,692,691,690,689,688,687,686,685,684,683] in bucket "default" from node 'ns_1@10.2.1.100' to node 'ns_1@10.2.1.101' INFO REPORT <0.11327.0> 2011-01-03 13:43:44 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:251: Args = [vbucketmigrator,"./bin/vbucketmigrator/vbucketmigrator", ["-e","-a","default","-h","10.2.1.100:11210","-d","10.2.1.101:11210","-A", "-v","-b","1023","-b","1022","-b","1021","-b","1020","-b","1019","-b", "1018","-b","1017","-b","1016","-b","1015","-b","1014","-b","1013","-b", "1012","-b","1011","-b","1010","-b","1009","-b","1008","-b","1007","-b", "1006","-b","1005","-b","1004","-b","1003","-b","1002","-b","1001","-b", "1000","-b","999","-b","998","-b","997","-b","996","-b","995","-b","994", "-b","993","-b","992","-b","991","-b","990","-b","989","-b","988","-b", "987","-b","986","-b","985","-b","984","-b","983","-b","982","-b","981", "-b","980","-b","979","-b","978","-b","977","-b","976","-b","975","-b", "974","-b","973","-b","972","-b","971","-b","970","-b","969","-b","968", "-b","967","-b","966","-b","965","-b","964","-b","963","-b","962","-b", "961","-b","960","-b","959","-b","958","-b","957","-b","956","-b","955", "-b","954","-b","953","-b","952","-b","951","-b","950","-b","949","-b", "948","-b","947","-b","946","-b","945","-b","944","-b","943","-b","942", "-b","941","-b","940","-b","939","-b","938","-b","937","-b","936","-b", "935","-b","934","-b","933","-b","932","-b","931","-b","930","-b","929", "-b","928","-b","927","-b","926","-b","925","-b","924","-b","923","-b", "922","-b","921","-b","920","-b","919","-b","918","-b","917","-b","916", "-b","915","-b","914","-b","913","-b","912","-b","911","-b","910","-b", "909","-b","908","-b","907","-b","906","-b","905","-b","904","-b","903", "-b","902","-b","901","-b","900","-b","899","-b","898","-b","897","-b", "896","-b","895","-b","894","-b","893","-b","892","-b","891","-b","890", "-b","889","-b","888","-b","887","-b","886","-b","885","-b","884","-b", "883","-b","882","-b","881","-b","880","-b","879","-b","878","-b","877", "-b","876","-b","875","-b","874","-b","873","-b","872","-b","871","-b", 
"870","-b","869","-b","868","-b","867","-b","866","-b","865","-b","864", "-b","863","-b","862","-b","861","-b","860","-b","859","-b","858","-b", "857","-b","856","-b","855","-b","854","-b","853","-b","852","-b","851", "-b","850","-b","849","-b","848","-b","847","-b","846","-b","845","-b", "844","-b","843","-b","842","-b","841","-b","840","-b","839","-b","838", "-b","837","-b","836","-b","835","-b","834","-b","833","-b","832","-b", "831","-b","830","-b","829","-b","828","-b","827","-b","826","-b","825", "-b","824","-b","823","-b","822","-b","821","-b","820","-b","819","-b", "818","-b","817","-b","816","-b","815","-b","814","-b","813","-b","812", "-b","811","-b","810","-b","809","-b","808","-b","807","-b","806","-b", "805","-b","804","-b","803","-b","802","-b","801","-b","800","-b","799", "-b","798","-b","797","-b","796","-b","795","-b","794","-b","793","-b", "792","-b","791","-b","790","-b","789","-b","788","-b","787","-b","786", "-b","785","-b","784","-b","783","-b","782","-b","781","-b","780","-b", "779","-b","778","-b","777","-b","776","-b","775","-b","774","-b","773", "-b","772","-b","771","-b","770","-b","769","-b","768","-b","767","-b", "766","-b","765","-b","764","-b","763","-b","762","-b","761","-b","760", "-b","759","-b","758","-b","757","-b","756","-b","755","-b","754","-b", "753","-b","752","-b","751","-b","750","-b","749","-b","748","-b","747", "-b","746","-b","745","-b","744","-b","743","-b","742","-b","741","-b", "740","-b","739","-b","738","-b","737","-b","736","-b","735","-b","734", "-b","733","-b","732","-b","731","-b","730","-b","729","-b","728","-b", "727","-b","726","-b","725","-b","724","-b","723","-b","722","-b","721", "-b","720","-b","719","-b","718","-b","717","-b","716","-b","715","-b", "714","-b","713","-b","712","-b","711","-b","710","-b","709","-b","708", "-b","707","-b","706","-b","705","-b","704","-b","703","-b","702","-b", "701","-b","700","-b","699","-b","698","-b","697","-b","696","-b","695", "-b","694","-b","693","-b","692","-b","691","-b","690","-b","689","-b", "688","-b","687","-b","686","-b","685","-b","684","-b","683"], [use_stdio,stderr_to_stdout,{write_data,[[],"\n"]}]] INFO REPORT <0.105.0> 2011-01-03 13:43:44 =============================================================================== memcached<0.105.0>: Suspend eq_tapq:anon_688 for 1.00 secs PROGRESS REPORT <0.260.0> 2011-01-03 13:43:44 =============================================================================== supervisor {local,'ns_vbm_sup-default'} started [{pid,<0.11394.0>}, {name,{child_id,[1023,1022,1021,1020,1019,1018,1017,1016,1015,1014, 1013,1012,1011,1010,1009,1008,1007,1006,1005,1004, 1003,1002,1001,1000,999,998,997,996,995,994,993, 992,991,990,989,988,987,986,985,984,983,982,981, 980,979,978,977,976,975,974,973,972,971,970,969, 968,967,966,965,964,963,962,961,960,959,958,957, 956,955,954,953,952,951,950,949,948,947,946,945, 944,943,942,941,940,939,938,937,936,935,934,933, 932,931,930,929,928,927,926,925,924,923,922,921, 920,919,918,917,916,915,914,913,912,911,910,909, 908,907,906,905,904,903,902,901,900,899,898,897, 896,895,894,893,892,891,890,889,888,887,886,885, 884,883,882,881,880,879,878,877,876,875,874,873, 872,871,870,869,868,867,866,865,864,863,862,861, 860,859,858,857,856,855,854,853,852,851,850,849, 848,847,846,845,844,843,842,841,840,839,838,837, 836,835,834,833,832,831,830,829,828,827,826,825, 824,823,822,821,820,819,818,817,816,815,814,813, 812,811,810,809,808,807,806,805,804,803,802,801, 800,799,798,797,796,795,794,793,792,791,790,789, 
788,787,786,785,784,783,782,781,780,779,778,777, 776,775,774,773,772,771,770,769,768,767,766,765, 764,763,762,761,760,759,758,757,756,755,754,753, 752,751,750,749,748,747,746,745,744,743,742,741, 740,739,738,737,736,735,734,733,732,731,730,729, 728,727,726,725,724,723,722,721,720,719,718,717, 716,715,714,713,712,711,710,709,708,707,706,705, 704,703,702,701,700,699,698,697,696,695,694,693, 692,691,690,689,688,687,686,685,684,683], 'ns_1@10.2.1.101'}}, {mfa,{ns_port_server,start_link, [vbucketmigrator, "./bin/vbucketmigrator/vbucketmigrator", ["-e","-a","default","-h","10.2.1.100:11210", "-d","10.2.1.101:11210","-A","-v","-b", "1023","-b","1022","-b","1021","-b","1020", "-b","1019","-b","1018","-b","1017","-b", "1016","-b","1015","-b","1014","-b","1013", "-b","1012","-b","1011","-b","1010","-b", "1009","-b","1008","-b","1007","-b","1006", "-b","1005","-b","1004","-b","1003","-b", "1002","-b","1001","-b","1000","-b","999", "-b","998","-b","997","-b","996","-b","995", "-b","994","-b","993","-b","992","-b","991", "-b","990","-b","989","-b","988","-b","987", "-b","986","-b","985","-b","984","-b","983", "-b","982","-b","981","-b","980","-b","979", "-b","978","-b","977","-b","976","-b","975", "-b","974","-b","973","-b","972","-b","971", "-b","970","-b","969","-b","968","-b","967", "-b","966","-b","965","-b","964","-b","963", "-b","962","-b","961","-b","960","-b","959", "-b","958","-b","957","-b","956","-b","955", "-b","954","-b","953","-b","952","-b","951", "-b","950","-b","949","-b","948","-b","947", "-b","946","-b","945","-b","944","-b","943", "-b","942","-b","941","-b","940","-b","939", "-b","938","-b","937","-b","936","-b","935", "-b","934","-b","933","-b","932","-b","931", "-b","930","-b","929","-b","928","-b","927", "-b","926","-b","925","-b","924","-b","923", "-b","922","-b","921","-b","920","-b","919", "-b","918","-b","917","-b","916","-b","915", "-b","914","-b","913","-b","912","-b","911", "-b","910","-b","909","-b","908","-b","907", "-b","906","-b","905","-b","904","-b","903", "-b","902","-b","901","-b","900","-b","899", "-b","898","-b","897","-b","896","-b","895", "-b","894","-b","893","-b","892","-b","891", "-b","890","-b","889","-b","888","-b","887", "-b","886","-b","885","-b","884","-b","883", "-b","882","-b","881","-b","880","-b","879", "-b","878","-b","877","-b","876","-b","875", "-b","874","-b","873","-b","872","-b","871", "-b","870","-b","869","-b","868","-b","867", "-b","866","-b","865","-b","864","-b","863", "-b","862","-b","861","-b","860","-b","859", "-b","858","-b","857","-b","856","-b","855", "-b","854","-b","853","-b","852","-b","851", "-b","850","-b","849","-b","848","-b","847", "-b","846","-b","845","-b","844","-b","843", "-b","842","-b","841","-b","840","-b","839", "-b","838","-b","837","-b","836","-b","835", "-b","834","-b","833","-b","832","-b","831", "-b","830","-b","829","-b","828","-b","827", "-b","826","-b","825","-b","824","-b","823", "-b","822","-b","821","-b","820","-b","819", "-b","818","-b","817","-b","816","-b","815", "-b","814","-b","813","-b","812","-b","811", "-b","810","-b","809","-b","808","-b","807", "-b","806","-b","805","-b","804","-b","803", "-b","802","-b","801","-b","800","-b","799", "-b","798","-b","797","-b","796","-b","795", "-b","794","-b","793","-b","792","-b","791", "-b","790","-b","789","-b","788","-b","787", "-b","786","-b","785","-b","784","-b","783", "-b","782","-b","781","-b","780","-b","779", "-b","778","-b","777","-b","776","-b","775", "-b","774","-b","773","-b","772","-b","771", "-b","770","-b","769","-b","768","-b","767", 
"-b","766","-b","765","-b","764","-b","763", "-b","762","-b","761","-b","760","-b","759", "-b","758","-b","757","-b","756","-b","755", "-b","754","-b","753","-b","752","-b","751", "-b","750","-b","749","-b","748","-b","747", "-b","746","-b","745","-b","744","-b","743", "-b","742","-b","741","-b","740","-b","739", "-b","738","-b","737","-b","736","-b","735", "-b","734","-b","733","-b","732","-b","731", "-b","730","-b","729","-b","728","-b","727", "-b","726","-b","725","-b","724","-b","723", "-b","722","-b","721","-b","720","-b","719", "-b","718","-b","717","-b","716","-b","715", "-b","714","-b","713","-b","712","-b","711", "-b","710","-b","709","-b","708","-b","707", "-b","706","-b","705","-b","704","-b","703", "-b","702","-b","701","-b","700","-b","699", "-b","698","-b","697","-b","696","-b","695", "-b","694","-b","693","-b","692","-b","691", "-b","690","-b","689","-b","688","-b","687", "-b","686","-b","685","-b","684","-b","683"], [use_stdio,stderr_to_stdout, {write_data,[[],"\n"]}]]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] INFO REPORT <0.11327.0> 2011-01-03 13:43:44 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:180: Killing replicator {child_id, [511,510,509,508,507,506, 505,504,503,502,501,500, 499,498,497,496,495,494, 493,492,491,490,489,488, 487,486,485,484,483,482, 481,480,479,478,477,476, 475,474,473,472,471,470, 469,468,467,466,465,464, 463,462,461,460,459,458, 457,456,455,454,453,452, 451,450,449,448,447,446, 445,444,443,442,441,440, 439,438,437,436,435,434, 433,432,431,430,429,428, 427,426,425,424,423,422, 421,420,419,418,417,416, 415,414,413,412,411,410, 409,408,407,406,405,404, 403,402,401,400,399,398, 397,396,395,394,393,392, 391,390,389,388,387,386, 385,384,383,382,381,380, 379,378,377,376,375,374, 373,372,371,370,369,368, 367,366,365,364,363,362, 361,360,359,358,357,356, 355,354,353,352,351,350, 349,348,347,346,345,344, 343,342,341,340,339,338, 337,336,335,334,333,332, 331,330,329,328,327,326, 325,324,323,322,321,320, 319,318,317,316,315,314, 313,312,311,310,309,308, 307,306,305,304,303,302, 301,300,299,298,297,296, 295,294,293,292,291,290, 289,288,287,286,285,284, 283,282,281,280,279,278, 277,276,275,274,273,272, 271,270,269,268,267,266, 265,264,263,262,261,260, 259,258,257,256,255,254, 253,252,251,250,249,248, 247,246,245,244,243,242, 241,240,239,238,237,236, 235,234,233,232,231,230, 229,228,227,226,225,224, 223,222,221,220,219,218, 217,216,215,214,213,212, 211,210,209,208,207,206, 205,204,203,202,201,200, 199,198,197,196,195,194, 193,192,191,190,189,188, 187,186,185,184,183,182, 181,180,179,178,177,176, 175,174,173,172,171,170, 169,168,167,166,165,164, 163,162,161,160,159,158, 157,156,155,154,153,152, 151,150,149,148,147,146, 145,144,143,142,141,140, 139,138,137,136,135,134, 133,132,131,130,129,128, 127,126,125,124,123,122, 121,120,119,118,117,116, 115,114,113,112,111,110, 109,108,107,106,105,104, 103,102,101,100,99,98,97, 96,95,94,93,92,91,90,89, 88,87,86,85,84,83,82,81, 80,79,78,77,76,75,74,73, 72,71], 'ns_1@10.2.1.100'} on node 'ns_1@10.2.1.101' INFO REPORT <0.11327.0> 2011-01-03 13:43:45 =============================================================================== ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets 
[511,510,509, ... ,165,164,163] in bucket "default"
from node 'ns_1@10.2.1.101' to node 'ns_1@10.2.1.100'

INFO REPORT <0.11327.0> 2011-01-03 13:43:45
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:251: Args = [vbucketmigrator,
 "./bin/vbucketmigrator/vbucketmigrator",
 ["-e","-a","default","-h","10.2.1.101:11210","-d","10.2.1.100:11210","-A",
  "-v","-b","511","-b","510", ... ,"-b","164","-b","163"],
 [use_stdio,stderr_to_stdout,{write_data,[[],"\n"]}]]

INFO REPORT <0.11394.0> 2011-01-03 13:43:46
===============================================================================
vbucketmigrator<0.11394.0>: Connecting to {Sock 10.2.1.101:11210}
vbucketmigrator<0.11394.0>: Authenticating towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.11394.0>: Authenticated towards: {Sock 10.2.1.101:11210}
vbucketmigrator<0.11394.0>: Connecting to {Sock 10.2.1.100:11210}
vbucketmigrator<0.11394.0>: Authenticating towards: {Sock 10.2.1.100:11210}
vbucketmigrator<0.11394.0>: Authenticated towards: {Sock 10.2.1.100:11210}

INFO REPORT <0.105.0> 2011-01-03 13:43:47
===============================================================================
memcached<0.105.0>: Backfilling token for eq_tapq:anon_688 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_688 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_688 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_688 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_688 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_688 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_688 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_688 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_688 went invalid. Stopping backfill.
memcached<0.105.0>: Backfilling token for eq_tapq:anon_688 went invalid. Stopping backfill.

ERROR REPORT <0.105.0> 2011-01-03 13:43:47
===============================================================================
ns_1@10.2.1.100:ns_port_server:130: Dropped 379 log lines from memcached

INFO REPORT <0.11327.0> 2011-01-03 13:43:47
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets
[633,632,629,628,625,624,621,620,617,616,613,612,609,608,605,604,601,600,597,
 596,593,592,589,588,585,584,581,580,577,576,573,572,569,568,565,564,561,560,
 557,556,553,552,549,548,545,544,541,540,537,536,533,532,529,528,525,524,521,
 520,517,516,513,512,68,67,64,63,60,59,56,55,52,51,50,49,48,47,46,45,44,43,
 42,41,40,39,38,37,36,35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18,
 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0] in bucket "default"
from node 'ns_1@10.2.1.102' to node 'ns_1@10.2.1.101'

INFO REPORT <0.85.0> 2011-01-03 13:43:47
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.110.0> 2011-01-03 13:43:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running:
{janitor_state,["default"],<0.11327.0>}

INFO REPORT <0.105.0> 2011-01-03 13:43:49
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:43:51
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:43:53
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:43:54
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:43:57
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:43:58
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.110.0> 2011-01-03 13:43:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running:
{janitor_state,["default"],<0.11327.0>}

INFO REPORT <0.105.0> 2011-01-03 13:43:59
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:44:01
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:44:02
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:44:03
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
CRASH REPORT <0.11327.0> 2011-01-03 13:44:04
===============================================================================
Crashing process
  initial_call     {ns_janitor,cleanup,['Argument__1']}
  pid              <0.11327.0>
  registered_name  []
  error_info
    {exit,{{{badmatch,{error,timeout}},
            [{mc_client_binary,cmd_binary_vocal_recv,5},
             {mc_client_binary,delete_vbucket,2},
             {ns_memcached,handle_call,3},
             {gen_server,handle_msg,5},
             {proc_lib,init_p_do_apply,3}]},
           {gen_server,call,
            [{'ns_memcached-default','ns_1@10.2.1.101'},
             {delete_vbucket,625},
             30000]}},
     [{gen_server,call,3},
      {lists,foreach,2},
      {ns_vbm_sup,start_replicas,4},
      {lists,foreach,2},
      {ns_vbm_sup,'-set_replicas/2-fun-1-',3},
      {lists,foreach,2},
      {proc_lib,init_p_do_apply,3}]}
  ancestors        [<0.110.0>,ns_server_sup,ns_server_cluster_sup,<0.60.0>]
  messages         []
  links            [<0.110.0>]
  dictionary       []
  trap_exit        false
  status           running
  heap_size        46368
  stack_size       24
  reductions       2562897

INFO REPORT <0.110.0> 2011-01-03 13:44:04
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default"
with reason {{{badmatch,{error,timeout}},
              [{mc_client_binary,cmd_binary_vocal_recv,5},
               {mc_client_binary,delete_vbucket,2},
               {ns_memcached,handle_call,3},
               {gen_server,handle_msg,5},
               {proc_lib,init_p_do_apply,3}]},
             {gen_server,call,
              [{'ns_memcached-default','ns_1@10.2.1.101'},
               {delete_vbucket,625},
               30000]}}

INFO REPORT <0.11437.0> 2011-01-03 13:44:05
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']

INFO REPORT <0.11437.0> 2011-01-03 13:44:05
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master
'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102',active},
                              {'ns_1@10.2.1.101',dead}}

[identical ns_janitor:193 reports, all at 13:44:05, follow for vbuckets
 1-52, 55, 56, 59, 60, 63, 64, 67, 68, 512, 513, 516, 517, 520, 521, 524,
 525, 528, 529, 532, 533, 536, 537, 540, 541, 544, 545, 548, 549, 552, 553,
 556, 557, 560, 561, 564, 565, 568, 569, 572, 573, 576, 577, 580, 581, 584,
 585, 588, 589, 592, 593, 596, 597, 600, 601, 604, 605, 608, 609, 612, 613,
 616, 617, 620, 621 and 624]

INFO REPORT <0.105.0> 2011-01-03 13:44:05
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.11437.0> 2011-01-03 13:44:05
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets
[633,632,629,628,625,624,621,620,617,616,613,612,609,608,605,604,601,600,597,
 596,593,592,589,588,585,584,581,580,577,576,573,572,569,568,565,564,561,560,
 557,556,553,552,549,548,545,544,541,540,537,536,533,532,529,528,525,524,521,
 520,517,516,513,512,68,67,64,63,60,59,56,55,52,51,50,49,48,47,46,45,44,43,
 42,41,40,39,38,37,36,35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18,
 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0] in bucket "default"
from node 'ns_1@10.2.1.102' to node 'ns_1@10.2.1.101'

INFO REPORT <0.105.0> 2011-01-03 13:44:07
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.93.0> 2011-01-03 13:44:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,91047,413401}}, {active_buckets,["default"]},
   {memory,[{total,22404408}, {processes,14074524}, {processes_used,14055276},
            {system,8329884}, {atom,560301}, {atom_used,557531},
            {binary,187800}, {code,4570913}, {ets,1589724}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"},
             {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"},
             {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"}, {wall_clock,3059},
   {memory_data,{4284698624,4249149440,{<0.299.0>,6656756}}},
   {disk_data,[{"C:\\",48162864,59}, {"D:\\",51279476,0},
               {"G:\\",34724465,17}]},
   {replication,[{"default",0.5}]},
   {system_memory_data,[{total_memory,4284698624}, {free_memory,53084160},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{3050403,0}}, {context_switches,{910625,0}},
                {garbage_collection,{163050,1830279935,0}},
                {io,{{input,93541155},{output,46300405}}},
                {reductions,{737504969,854531}}, {run_queue,0},
                {runtime,{42650,47}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,91048,396401}}, {active_buckets,["default"]},
   {memory,[{total,17390808}, {processes,9092044}, {processes_used,9082468},
            {system,8298764}, {atom,559813}, {atom_used,556363},
            {binary,218600}, {code,4551541}, {ets,1555772}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"},
             {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"},
             {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]},
   {system_arch,"windows"}, {wall_clock,2990},
   {memory_data,{4284698624,4201447424,{<11993.387.0>,3328596}}},
   {disk_data,[{"C:\\",46243100,46}, {"D:\\",51809624,0},
               {"G:\\",33929248,18}]},
   {replication,[{"default",0.5}]},
   {system_memory_data,[{total_memory,4284698624}, {free_memory,40386560},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{2982291,1403}}, {context_switches,{532332,0}},
                {garbage_collection,{121158,1156201628,0}},
                {io,{{input,72317731},{output,36579040}}},
                {reductions,{360115794,570834}}, {run_queue,0},
                {runtime,{27892,78}}]}]},
 {'ns_1@10.2.1.102',
  [{last_heard,{1294,91048,178400}}, {active_buckets,["default"]},
   {memory,[{total,16414928}, {processes,8696404}, {processes_used,8683436},
            {system,7718524}, {atom,541077}, {atom_used,528868},
            {binary,312664}, {code,4280811}, {ets,1171588}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"},
             {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"},
             {stdlib,"1.16.4"}]},
   {system_arch,"windows"}, {wall_clock,1978},
   {memory_data,{4284698624,3359277056,{<10870.307.0>,2843024}}},
   {disk_data,[{"C:\\",49423972,41}, {"D:\\",52797620,0},
               {"G:\\",34724465,17}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624}, {free_memory,926580736},
                        {system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{1970324,0}}, {context_switches,{199616,0}},
                {garbage_collection,{56293,283666269,0}},
                {io,{{input,20929465},{output,18190421}}},
                {reductions,{114967566,592549}}, {run_queue,0},
                {runtime,{11325,15}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:44:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running:
{janitor_state,["default"],<0.11437.0>}

INFO REPORT <0.105.0> 2011-01-03 13:44:09
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:44:11
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.259.0> 2011-01-03 13:44:11
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 695  auth_errors 0  bucket_conns 4  bytes_read 6630060040
bytes_written 210132787  cas_badval 0  cas_hits 0  cas_misses 0  cmd_flush 0
cmd_get 0  cmd_set 4548601  conn_yields 14957  connection_structures 235
curr_connections 26  curr_items 2825780  curr_items_tot 4875251
daemon_connections 10  decr_hits 0  decr_misses 0  delete_hits 0
delete_misses 0  ep_bg_fetched 0  ep_commit_num 5121  ep_commit_time 0
ep_commit_time_total 1520  ep_data_age 15  ep_data_age_highwat 495
ep_db_cleaner_status complete  ep_db_strategy multiMTDB  ep_dbinit 81
ep_dbname c:/Program Files/Membase/Server/data/ns_1/default  ep_dbshards 4
ep_expired 0  ep_flush_duration 3  ep_flush_duration_highwat 297
ep_flush_duration_total 2012  ep_flush_preempts 0  ep_flusher_state running
ep_flusher_todo 0  ep_io_num_read 811823  ep_io_num_write 4693971
ep_io_read_bytes 1023617259  ep_io_write_bytes 4649399717
ep_item_begin_failed 0  ep_item_commit_failed 0  ep_item_flush_expired 0
ep_item_flush_failed 0  ep_kv_size 2305856021  ep_max_data_size 3426746368
ep_max_txn_size 1000  ep_mem_high_wat 2570059776  ep_mem_low_wat 2056047820
ep_min_data_age 0  ep_num_active_non_resident 1088646
ep_num_eject_failures 2148726  ep_num_eject_replicas 890274
ep_num_expiry_pager_runs 0  ep_num_non_resident 2292337
ep_num_not_my_vbuckets 854355  ep_num_pager_runs 7
ep_num_value_ejects 2483368  ep_oom_errors 0  ep_overhead 60930593
ep_pending_ops 0  ep_pending_ops_max 0  ep_pending_ops_max_duration 0
ep_pending_ops_total 0  ep_queue_age_cap 900  ep_queue_size 500000
ep_storage_age 0  ep_storage_age_highwat 492  ep_storage_type featured
ep_store_max_concurrency 10  ep_store_max_readers 9  ep_store_max_readwrite 1
ep_tap_bg_fetch_requeued 0  ep_tap_bg_fetched 805567  ep_tap_keepalive 0
ep_tmp_oom_errors 0  ep_too_old 0  ep_too_young 0
ep_total_cache_size 5310992245  ep_total_del_items 0
ep_total_enqueued 5194331  ep_total_new_items 4688278
ep_total_persisted 4693971  ep_vbucket_del 512
ep_vbucket_del_avg_walltime 118096  ep_vbucket_del_fail 0
ep_vbucket_del_max_walltime 140400  ep_vbucket_del_total_walltime 60465600
ep_version 1.6.2  ep_warmed_up 0  ep_warmup true  ep_warmup_dups 0
ep_warmup_oom 0  ep_warmup_thread complete  ep_warmup_time 31200
get_hits 0  get_misses 0  incr_hits 0  incr_misses 0  libevent 2.0.7-rc
limit_maxbytes 67108864  mem_used 2366786614  pid 2160  pointer_size 64
rejected_conns 0  tap_connect_received 689  tap_mutation_received 2546645
tap_mutation_sent 3604019  tap_opaque_received 2332  tap_opaque_sent 2832
tap_vbucket_set_sent 1367  threads 4  time 1294091050  total_connections 1409
uptime 3064  version 1.4.4_304_g7d5a132

INFO REPORT <0.105.0> 2011-01-03 13:44:13
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:44:14
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:44:15
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:44:16
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:44:18
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.110.0> 2011-01-03 13:44:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running:
{janitor_state,["default"],<0.11437.0>}

CRASH REPORT <0.11437.0> 2011-01-03 13:44:18
===============================================================================
Crashing process
  initial_call     {ns_janitor,cleanup,['Argument__1']}
  pid              <0.11437.0>
  registered_name  []
  error_info
    {exit,{{{badmatch,{error,timeout}},
            [{mc_client_binary,cmd_binary_vocal_recv,5},
             {mc_client_binary,delete_vbucket,2},
             {ns_memcached,handle_call,3},
             {gen_server,handle_msg,5},
             {proc_lib,init_p_do_apply,3}]},
           {gen_server,call,
            [{'ns_memcached-default','ns_1@10.2.1.101'},
             {delete_vbucket,620},
             30000]}},
     [{gen_server,call,3},
      {lists,foreach,2},
      {ns_vbm_sup,start_replicas,4},
      {lists,foreach,2},
      {ns_vbm_sup,'-set_replicas/2-fun-1-',3},
      {lists,foreach,2},
      {proc_lib,init_p_do_apply,3}]}
  ancestors        [<0.110.0>,ns_server_sup,ns_server_cluster_sup,<0.60.0>]
  messages         []
  links            [<0.110.0>]
  dictionary       []
  trap_exit        false
  status           running
  heap_size        17711
  stack_size       24
  reductions       2455274

INFO REPORT <0.110.0> 2011-01-03 13:44:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default"
with reason {{{badmatch,{error,timeout}},
              [{mc_client_binary,cmd_binary_vocal_recv,5},
               {mc_client_binary,delete_vbucket,2},
               {ns_memcached,handle_call,3},
               {gen_server,handle_msg,5},
               {proc_lib,init_p_do_apply,3}]},
             {gen_server,call,
              [{'ns_memcached-default','ns_1@10.2.1.101'},
               {delete_vbucket,620},
               30000]}}

INFO REPORT <0.11489.0> 2011-01-03 13:44:19
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']
for "default" on ['ns_1@10.2.1.101'] INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11489.0> 2011-01-03 13:44:20 =============================================================================== 
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 67 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 68 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.105.0> 2011-01-03 13:44:20
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 512 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 513 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 516 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 517 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 520 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 521 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 524 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 525 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 528 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 529 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 532 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 533 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 536 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 537 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 540 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 541 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 544 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 545 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 548 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 549 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 552 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 553 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 556 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 557 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 560 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 561 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 564 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 565 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 568 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 569 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 572 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 573 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 576 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 577 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 580 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 581 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 584 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 585 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 588 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 589 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 592 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 593 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 596 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 597 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 600 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 601 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 604 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 605 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 608 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 609 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 612 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 613 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 616 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 617 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11489.0> 2011-01-03 13:44:20
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets [633,632,629,628,625,624,621,620,617,616,613,612,609,608,605,604,601,600,597,596,593,592,589,588,585,584,581,580,577,576,573,572,569,568,565,564,561,560,557,556,553,552,549,548,545,544,541,540,537,536,533,532,529,528,525,524,521,520,517,516,513,512,68,67,64,63,60,59,56,55,52,51,50,49,48,47,46,45,44,43,42,41,40,39,38,37,36,35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0] in bucket "default" from node 'ns_1@10.2.1.102' to node 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:44:21
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:23
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:25
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:26
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:27
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.110.0> 2011-01-03 13:44:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11489.0>}
INFO REPORT <0.105.0> 2011-01-03 13:44:28
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:30
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:32
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:33
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:35
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:37
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:38
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.110.0> 2011-01-03 13:44:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11489.0>}
INFO REPORT <0.105.0> 2011-01-03 13:44:39
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:40
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.85.0> 2011-01-03 13:44:41
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:44:42
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:44
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:46
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:48
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.110.0> 2011-01-03 13:44:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11489.0>}
INFO REPORT <0.105.0> 2011-01-03 13:44:49
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:50
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:52
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:53
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:54
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:56
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:44:57
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.110.0> 2011-01-03 13:44:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11489.0>}
INFO REPORT <0.105.0> 2011-01-03 13:44:59
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:45:01
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:45:02
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:45:04
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:45:05
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:45:07
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.93.0> 2011-01-03 13:45:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses: [{'ns_1@10.2.1.100', [{last_heard,{1294,91107,411401}}, {active_buckets,["default"]}, {memory, [{total,21792352}, {processes,13443316}, {processes_used,13423612}, {system,8349036}, {atom,560301}, {atom_used,557531}, {binary,172616}, {code,4570913}, {ets,1624764}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,3119}, {memory_data,{4284698624,4239364096,{<0.299.0>,6656756}}}, {disk_data, [{"C:\\",48162864,59},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.5}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,82796544}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{3110401,0}}, {context_switches,{925956,0}}, {garbage_collection,{165567,1855317319,0}}, {io,{{input,96032624},{output,47516342}}}, {reductions,{752314035,3253979}}, {run_queue,0}, {runtime,{43571,266}}]}]}, {'ns_1@10.2.1.101', [{last_heard,{1294,91107,411402}}, {active_buckets,["default"]}, {memory, [{total,17010584}, {processes,8663636}, {processes_used,8653572}, {system,8346948}, {atom,559813}, {atom_used,556363}, {binary,235048}, {code,4551541}, {ets,1587484}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,3049}, {memory_data,{4284698624,4202885120,{<11993.387.0>,3328596}}}, {disk_data, [{"C:\\",46243100,46},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.5}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,49741824}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{3042273,1372}}, {context_switches,{541993,0}}, {garbage_collection,{122831,1178968795,0}}, {io,{{input,73944948},{output,36919050}}}, {reductions,{368893064,873617}}, {run_queue,1}, {runtime,{28657,63}}]}]}, {'ns_1@10.2.1.102', [{last_heard,{1294,91108,176400}}, {active_buckets,["default"]}, {memory, [{total,16716080}, {processes,8957236}, {processes_used,8944268}, {system,7758844}, {atom,541077}, {atom_used,528868}, {binary,317608}, {code,4280811}, {ets,1207180}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2038}, {memory_data,{4284698624,3359105024,{<10870.307.0>,4114268}}}, {disk_data, [{"C:\\",49423972,41},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,930652160}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2030322,0}}, {context_switches,{205856,0}}, {garbage_collection,{58217,293038445,0}}, {io,{{input,21960491},{output,19204853}}}, {reductions,{118668869,590296}}, {run_queue,0}, {runtime,{11528,47}}]}]}]
INFO REPORT <0.110.0> 2011-01-03 13:45:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11489.0>}
INFO REPORT <0.105.0> 2011-01-03 13:45:08
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:45:10
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:45:13
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:45:16
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.85.0> 2011-01-03 13:45:18
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'
INFO REPORT <0.105.0> 2011-01-03 13:45:18
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.110.0> 2011-01-03 13:45:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11489.0>}
INFO REPORT <0.105.0> 2011-01-03 13:45:19
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
CRASH REPORT <0.11489.0> 2011-01-03 13:45:20
===============================================================================
Crashing process
  initial_call {ns_janitor,cleanup,['Argument__1']}
  pid <0.11489.0>
  registered_name []
  error_info {exit,{{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,573}, 30000]}}, [{gen_server,call,3}, {lists,foreach,2}, {ns_vbm_sup,start_replicas,4}, {lists,foreach,2}, {ns_vbm_sup,'-set_replicas/2-fun-1-',3}, {lists,foreach,2}, {proc_lib,init_p_do_apply,3}]}
  ancestors [<0.110.0>,ns_server_sup,ns_server_cluster_sup,<0.60.0>]
  messages []
  links [<0.110.0>]
  dictionary []
  trap_exit false
  status running
  heap_size 46368
  stack_size 24
  reductions 2452040
INFO REPORT <0.110.0> 2011-01-03 13:45:20
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default" with reason {{{badmatch, {error, timeout}}, [{mc_client_binary, cmd_binary_vocal_recv, 5}, {mc_client_binary, delete_vbucket, 2}, {ns_memcached, handle_call, 3}, {gen_server, handle_msg, 5}, {proc_lib, init_p_do_apply, 3}]}, {gen_server, call, [{'ns_memcached-default', 'ns_1@10.2.1.101'}, {delete_vbucket, 573}, 30000]}}
INFO REPORT <0.105.0> 2011-01-03 13:45:21
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
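The CRASH REPORT above is the proximate failure here: a binary-protocol receive inside mc_client_binary evidently hit {error, timeout} while 'ns_memcached-default' on 'ns_1@10.2.1.101' was handling {delete_vbucket, 573}, the badmatch took that server down, and the janitor's synchronous gen_server:call then exited with the same reason, ending the janitor run. A minimal Erlang sketch of that call shape (hypothetical module, not ns_server code), assuming only what the error_info term shows:

-module(janitor_call_sketch).
-export([delete_vbucket/2]).

%% Mirrors the call recorded in the crash report: a synchronous
%% gen_server:call with a 30 s timeout against the per-bucket
%% 'ns_memcached-default' server on the replica node.
delete_vbucket(Node, VBucket) ->
    %% If the callee crashes mid-request (here: a badmatch on
    %% {error, timeout} from the memcached connection), OTP makes the
    %% caller exit with {CalleeReason, {gen_server, call, Args}},
    %% which is exactly the error_info shape printed above.
    gen_server:call({'ns_memcached-default', Node},
                    {delete_vbucket, VBucket},
                    30000).

Each janitor retry runs as a fresh process (<0.11622.0> below) and repeats the same kill-and-rebuild cycle, so it can hit the same timeout again, which is why the pattern recurs in this log.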
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 67 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 68 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 512 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 513 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 516 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 517 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 520 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 521 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 524 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 525 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 528 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 529 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 532 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 533 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 536 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 537 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 540 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 541 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 544 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 545 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 548 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 549 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 552 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 553 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 556 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 557 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 560 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 561 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 564 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 565 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 568 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 569 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 572 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 573 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
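Every record in the run above follows one template: the janitor sees the master copy of a vbucket active on 'ns_1@10.2.1.102' while the replica copy on 'ns_1@10.2.1.101' is reported dead, so it tears that vbucket's replicators down before rebuilding them (the ns_vbm_sup record that follows). A minimal sketch of that decision rule, assuming a plain list of {Node, State} pairs rather than the actual ns_janitor internals:

    %% Hypothetical reading of the logged condition: a vbucket needs its
    %% replicators killed (and later restarted) when its master copy is
    %% active but at least one replica copy is dead.
    needs_replicator_kill(Master, States) ->
        case lists:keyfind(Master, 1, States) of
            {Master, active} ->
                lists:any(fun({_Node, dead}) -> true;
                             (_)             -> false
                          end,
                          lists:keydelete(Master, 1, States));
            _ ->
                false
        end.

With the tuple printed in each record, needs_replicator_kill('ns_1@10.2.1.102', [{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}]) returns true.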
INFO REPORT <0.11622.0> 2011-01-03 13:45:21
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets [633,632,629,628,625,624,621,620,617,616,613,612,609,608,605,604,601,600,597,596,593,592,589,588,585,584,581,580,577,576,573,572,569,568,565,564,561,560,557,556,553,552,549,548,545,544,541,540,537,536,533,532,529,528,525,524,521,520,517,516,513,512,68,67,64,63,60,59,56,55,52,51,50,49,48,47,46,45,44,43,42,41,40,39,38,37,36,35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0] in bucket "default" from node 'ns_1@10.2.1.102' to node 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:45:23
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.110.0> 2011-01-03 13:45:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11622.0>}
INFO REPORT <0.105.0> 2011-01-03 13:45:29
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.85.0> 2011-01-03 13:45:29
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:45:31
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:45:33
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.110.0> 2011-01-03 13:45:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11622.0>}
INFO REPORT <0.105.0> 2011-01-03 13:45:43
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.85.0> 2011-01-03 13:45:46
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'
INFO REPORT <0.105.0> 2011-01-03 13:45:48
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.110.0> 2011-01-03 13:45:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11622.0>}
INFO REPORT <0.105.0> 2011-01-03 13:45:50
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.259.0> 2011-01-03 13:45:51
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
 auth_cmds 695 auth_errors 0 bucket_conns 4 bytes_read 6871622539 bytes_written 211797603
 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 4548601 conn_yields 22374
 connection_structures 235 curr_connections 26 curr_items 2825780 curr_items_tot 4923082
 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0
 ep_commit_num 5394 ep_commit_time 0 ep_commit_time_total 1597 ep_data_age 372
 ep_data_age_highwat 495 ep_db_cleaner_status complete ep_db_strategy multiMTDB ep_dbinit 81
 ep_dbname c:/Program Files/Membase/Server/data/ns_1/default ep_dbshards 4 ep_expired 0
 ep_flush_duration 3 ep_flush_duration_highwat 297 ep_flush_duration_total 2012
 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 228003 ep_io_num_read 820134
 ep_io_num_write 4965970 ep_io_read_bytes 1030112542 ep_io_write_bytes 4861669817
 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0
 ep_kv_size 2140552070 ep_max_data_size 3426746368 ep_max_txn_size 1000
 ep_mem_high_wat 2570059776 ep_mem_low_wat 2056047820 ep_min_data_age 0
 ep_num_active_non_resident 1088646 ep_num_eject_failures 2148726 ep_num_eject_replicas 1122586
 ep_num_expiry_pager_runs 0 ep_num_non_resident 2564335 ep_num_not_my_vbuckets 854355
 ep_num_pager_runs 7 ep_num_value_ejects 2755366 ep_oom_errors 0 ep_overhead 50301130
 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0
 ep_queue_age_cap 900 ep_queue_size 47831 ep_storage_age 370 ep_storage_age_highwat 492
 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9
 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 813878 ep_tap_keepalive 0
 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 5353338790
 ep_total_del_items 0 ep_total_enqueued 5242165 ep_total_new_items 4953690
 ep_total_persisted 4965969 ep_vbucket_del 512 ep_vbucket_del_avg_walltime 118096
 ep_vbucket_del_fail 0 ep_vbucket_del_max_walltime 140400 ep_vbucket_del_total_walltime 60465600
 ep_version 1.6.2 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0
 ep_warmup_thread complete ep_warmup_time 31200 get_hits 0 get_misses 0 incr_hits 0
 incr_misses 0 libevent 2.0.7-rc limit_maxbytes 67108864 mem_used 2190853200 pid 2160
 pointer_size 64 rejected_conns 0 tap_connect_received 689 tap_mutation_received 2841443
 tap_mutation_sent 3686749 tap_opaque_received 2332 tap_opaque_sent 2832
 tap_vbucket_set_sent 1367 threads 4 time 1294091151 total_connections 1409 uptime 3165
 version 1.4.4_304_g7d5a132
INFO REPORT <0.105.0> 2011-01-03 13:45:52
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:45:55
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:45:57
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.110.0> 2011-01-03 13:45:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11622.0>}
INFO REPORT <0.105.0> 2011-01-03 13:46:05
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:46:07
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.93.0> 2011-01-03 13:46:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
 [{'ns_1@10.2.1.100',
   [{last_heard,{1294,91167,409401}},
    {active_buckets,["default"]},
    {memory,[{total,21751272},{processes,13400148},{processes_used,13380444},{system,8351124},{atom,560301},{atom_used,557531},{binary,175448},{code,4570913},{ets,1623644}]},
    {cluster_compatibility_version,1},
    {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},{kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
    {system_arch,"windows"},
    {wall_clock,3179},
    {memory_data,{4284698624,4202168320,{<0.299.0>,6656756}}},
    {disk_data,[{"C:\\",48162864,60},{"D:\\",51279476,0},{"G:\\",34724465,17}]},
    {replication,[{"default",0.5}]},
    {system_memory_data,[{total_memory,4284698624},{free_memory,69480448},{system_total_memory,4284698624}]},
    {statistics,[{wall_clock,{3170399,0}},{context_switches,{938993,0}},{garbage_collection,{167912,1879682734,0}},{io,{{input,97764938},{output,47975715}}},{reductions,{764865215,3455344}},{run_queue,0},{runtime,{44413,296}}]}]},
  {'ns_1@10.2.1.101',
   [{last_heard,{1294,91167,409402}},
    {active_buckets,["default"]},
    {memory,[{total,19544720},{processes,11199292},{processes_used,11189228},{system,8345428},{atom,559813},{atom_used,556363},{binary,239056},{code,4551541},{ets,1581652}]},
    {cluster_compatibility_version,1},
    {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},{kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
    {system_arch,"windows"},
    {wall_clock,3109},
    {memory_data,{4284698624,4203905024,{<11993.8474.0>,9685896}}},
    {disk_data,[{"C:\\",46243100,46},{"D:\\",51809624,0},{"G:\\",33929248,18}]},
    {replication,[{"default",0.5}]},
    {system_memory_data,[{total_memory,4284698624},{free_memory,71774208},{system_total_memory,4284698624}]},
    {statistics,[{wall_clock,{3102006,1107}},{context_switches,{551831,0}},{garbage_collection,{124574,1201504463,0}},{io,{{input,75604168},{output,37268417}}},{reductions,{378148664,776326}},{run_queue,0},{runtime,{29530,46}}]}]},
  {'ns_1@10.2.1.102',
   [{last_heard,{1294,91168,174400}},
    {active_buckets,["default"]},
    {memory,[{total,16800920},{processes,9039348},{processes_used,9026380},{system,7761572},{atom,541077},{atom_used,528868},{binary,322016},{code,4280811},{ets,1205236}]},
    {cluster_compatibility_version,1},
    {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
    {system_arch,"windows"},
    {wall_clock,2098},
    {memory_data,{4284698624,3354697728,{<10870.307.0>,4114268}}},
    {disk_data,[{"C:\\",49423972,41},{"D:\\",52797620,0},{"G:\\",34724465,17}]},
    {replication,[{"default",0.0}]},
    {system_memory_data,[{total_memory,4284698624},{free_memory,934674432},{system_total_memory,4284698624}]},
    {statistics,[{wall_clock,{2090320,0}},{context_switches,{211493,0}},{garbage_collection,{59820,302069827,0}},{io,{{input,22229065},{output,19499529}}},{reductions,{122320969,590318}},{run_queue,0},{runtime,{11731,47}}]}]}]
INFO REPORT <0.110.0> 2011-01-03 13:46:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11622.0>}
INFO REPORT <0.105.0> 2011-01-03 13:46:10
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:46:13
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
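A few numbers in the stats record above are worth combining, since together they locate the bucket's memory state (values copied verbatim from that record; the arithmetic below is a reading aid in Erlang shell form, not ns_server output):

    %% Shell arithmetic over values copied from the stats record.
    CurrTot = 4923082, NonRes = 2564335.
    CurrTot - NonRes.              %% 2358747 items actually resident in RAM
    (CurrTot - NonRes) / CurrTot.  %% ~0.48 resident ratio
    MemUsed = 2190853200, Quota = 3426746368.
    MemUsed / Quota.               %% ~0.64 of ep_max_data_size
    MemUsed > 2056047820.          %% true: above ep_mem_low_wat
    MemUsed < 2570059776.          %% true: below ep_mem_high_wat
    228003 + 47831.                %% 275834 = ep_flusher_todo + ep_queue_size

So roughly half the items are no longer resident (consistent with ep_num_value_ejects 2755366), memory use sits between the pager watermarks, and over a quarter-million items are still queued for persistence.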
INFO REPORT <0.105.0> 2011-01-03 13:46:18
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.110.0> 2011-01-03 13:46:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11622.0>}
INFO REPORT <0.105.0> 2011-01-03 13:46:21
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:46:25
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.85.0> 2011-01-03 13:46:26
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'
INFO REPORT <0.110.0> 2011-01-03 13:46:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11622.0>}
INFO REPORT <0.110.0> 2011-01-03 13:46:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11622.0>}
INFO REPORT <0.105.0> 2011-01-03 13:46:40
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:46:42
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
CRASH REPORT <0.11622.0> 2011-01-03 13:46:46
===============================================================================
Crashing process
  initial_call {ns_janitor,cleanup,['Argument__1']}
  pid <0.11622.0>
  registered_name []
  error_info {exit,{{{badmatch,{error,timeout}},
                     [{mc_client_binary,cmd_binary_vocal_recv,5},
                      {mc_client_binary,delete_vbucket,2},
                      {ns_memcached,handle_call,3},
                      {gen_server,handle_msg,5},
                      {proc_lib,init_p_do_apply,3}]},
                    {gen_server,call,
                     [{'ns_memcached-default','ns_1@10.2.1.101'},
                      {delete_vbucket,68},
                      30000]}},
                   [{gen_server,call,3},
                    {lists,foreach,2},
                    {ns_vbm_sup,start_replicas,4},
                    {lists,foreach,2},
                    {ns_vbm_sup,'-set_replicas/2-fun-1-',3},
                    {lists,foreach,2},
                    {proc_lib,init_p_do_apply,3}]}
  ancestors [<0.110.0>,ns_server_sup,ns_server_cluster_sup,<0.60.0>]
  messages []
  links [<0.110.0>]
  dictionary []
  trap_exit false
  status running
  heap_size 17711
  stack_size 24
  reductions 2429531
INFO REPORT <0.110.0> 2011-01-03 13:46:46
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default" with reason {{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},{gen_server,call,[{'ns_memcached-default','ns_1@10.2.1.101'},{delete_vbucket,68},30000]}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']
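The crash report pins down why the janitor run died: its gen_server call asking the ns_memcached process on 'ns_1@10.2.1.101' to delete vbucket 68 did not complete within the 30000 ms timeout, and the {badmatch,{error,timeout}} raised in mc_client_binary:cmd_binary_vocal_recv/5 shows the ns_memcached server itself crashed while waiting on the binary-protocol response from its local memcached. The failing request, reconstructed from the crash report (a remote-shell sketch assuming the registered name and node are reachable, not the ns_janitor source):

    try
        gen_server:call({'ns_memcached-default', 'ns_1@10.2.1.101'},
                        {delete_vbucket, 68},
                        30000)
    catch
        exit:Reason ->
            %% Reason carries the server-side {badmatch,{error,timeout}}.
            {delete_vbucket_failed, Reason}
    end.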
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 67 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 68 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11806.0> 2011-01-03 13:46:47
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets [633,632,629,628,625,624,621,620,617,616,613,612,609,608,605,604,601,600,597,596,593,592,589,588,585,584,581,580,577,576,573,572,569,568,565,564,561,560,557,556,553,552,549,548,545,544,541,540,537,536,533,532,529,528,525,524,521,520,517,516,513,512,68,67,64,63,60,59,56,55,52,51,50,49,48,47,46,45,44,43,42,41,40,39,38,37,36,35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0] in bucket "default" from node 'ns_1@10.2.1.102' to node 'ns_1@10.2.1.101'
INFO REPORT <0.110.0> 2011-01-03 13:46:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11806.0>}
INFO REPORT <0.105.0> 2011-01-03 13:46:50
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:46:52
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:46:53
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.105.0> 2011-01-03 13:46:54
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
CRASH REPORT <0.11806.0> 2011-01-03 13:46:55
===============================================================================
Crashing process
  initial_call {ns_janitor,cleanup,['Argument__1']}
  pid <0.11806.0>
  registered_name []
  error_info {exit,{{{badmatch,{error,timeout}},
                     [{mc_client_binary,cmd_binary_vocal_recv,5},
                      {mc_client_binary,delete_vbucket,2},
                      {ns_memcached,handle_call,3},
                      {gen_server,handle_msg,5},
                      {proc_lib,init_p_do_apply,3}]},
                    {gen_server,call,
                     [{'ns_memcached-default','ns_1@10.2.1.101'},
                      {delete_vbucket,67},
                      30000]}},
                   [{gen_server,call,3},
                    {lists,foreach,2},
                    {ns_vbm_sup,start_replicas,4},
                    {lists,foreach,2},
                    {ns_vbm_sup,'-set_replicas/2-fun-1-',3},
                    {lists,foreach,2},
                    {proc_lib,init_p_do_apply,3}]}
  ancestors [<0.110.0>,ns_server_sup,ns_server_cluster_sup,<0.60.0>]
  messages []
  links [<0.110.0>]
  dictionary []
  trap_exit false
  status running
  heap_size 17711
  stack_size 24
  reductions 2393713
INFO REPORT <0.110.0> 2011-01-03 13:46:55
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default" with reason {{{badmatch,{error,timeout}},[{mc_client_binary,cmd_binary_vocal_recv,5},{mc_client_binary,delete_vbucket,2},{ns_memcached,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]},{gen_server,call,[{'ns_memcached-default','ns_1@10.2.1.101'},{delete_vbucket,67},30000]}}
INFO REPORT <0.105.0> 2011-01-03 13:46:55
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']
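This is the same cycle again: kill the replicators, restart them, then crash in delete_vbucket, with each pass getting slightly further down the vbucket list (68 on the first pass, 67 here, 64 on the pass that follows). The stats record earlier reports ep_vbucket_del_avg_walltime 118096 and ep_vbucket_del_max_walltime 140400 (units are not stated in the log), so vbucket deletion on these nodes is plausibly just slower than the janitor's fixed 30-second call timeout. A hypothetical wrapper that makes the timeout a parameter, purely to illustrate where the mismatch sits:

    %% Hypothetical helper: the same request the janitor makes, with a
    %% caller-chosen timeout. If a delete can outlive the fixed 30 s, the
    %% caller exits even though memcached may still finish the delete,
    %% and the next janitor pass repeats the kill/restart cycle.
    delete_vbucket_patiently(Node, VBucket, TimeoutMs) ->
        gen_server:call({'ns_memcached-default', Node},
                        {delete_vbucket, VBucket},
                        TimeoutMs).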
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 67 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}
INFO REPORT <0.11826.0> 2011-01-03 13:46:56
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets [633,632,629,628,625,624,621,620,617,616,613,612,609,608,605,604,601,600,597,596,593,592,589,588,585,584,581,580,577,576,573,572,569,568,565,564,561,560,557,556,553,552,549,548,545,544,541,540,537,536,533,532,529,528,525,524,521,520,517,516,513,512,68,67,64,63,60,59,56,55,52,51,50,49,48,47,46,45,44,43,42,41,40,39,38,37,36,35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0] in bucket "default" from node 'ns_1@10.2.1.102' to node 'ns_1@10.2.1.101'
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.110.0> 2011-01-03 13:46:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11826.0>}

INFO REPORT <0.105.0> 2011-01-03 13:46:58
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:00
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

CRASH REPORT <0.11826.0> 2011-01-03 13:47:02
===============================================================================
Crashing process
  initial_call     {ns_janitor,cleanup,['Argument__1']}
  pid              <0.11826.0>
  registered_name  []
  error_info       {exit,{{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,64}, 30000]}}, [{gen_server,call,3}, {lists,foreach,2}, {ns_vbm_sup,start_replicas,4}, {lists,foreach,2}, {ns_vbm_sup,'-set_replicas/2-fun-1-',3}, {lists,foreach,2}, {proc_lib,init_p_do_apply,3}]}
  ancestors        [<0.110.0>,ns_server_sup,ns_server_cluster_sup,<0.60.0>]
  messages         []
  links            [<0.110.0>]
  dictionary       []
  trap_exit        false
  status           running
  heap_size        17711
  stack_size       24
  reductions       2392630

INFO REPORT <0.110.0> 2011-01-03 13:47:02
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default" with reason {{{badmatch, {error, timeout}}, [{mc_client_binary, cmd_binary_vocal_recv, 5}, {mc_client_binary, delete_vbucket, 2}, {ns_memcached, handle_call, 3}, {gen_server, handle_msg, 5}, {proc_lib, init_p_do_apply, 3}]}, {gen_server, call, [{'ns_memcached-default', 'ns_1@10.2.1.101'}, {delete_vbucket, 64}, 30000]}}

INFO REPORT <0.105.0> 2011-01-03 13:47:02
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 55 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 56 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 59 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 60 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 63 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 64 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11848.0> 2011-01-03 13:47:03
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets [633,632,629,628,625,624,621,620,617,616,613,612,609,608,605,604,601,600,597,596,593,592,589,588,585,584,581,580,577,576,573,572,569,568,565,564,561,560,557,556,553,552,549,548,545,544,541,540,537,536,533,532,529,528,525,524,521,520,517,516,513,512,68,67,64,63,60,59,56,55,52,51,50,49,48,47,46,45,44,43,42,41,40,39,38,37,36,35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0] in bucket "default" from node 'ns_1@10.2.1.102' to node 'ns_1@10.2.1.101'

INFO REPORT <0.105.0> 2011-01-03 13:47:04
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:06
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.93.0> 2011-01-03 13:47:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100', [{last_heard,{1294,91227,423402}}, {active_buckets,["default"]}, {memory, [{total,21859144}, {processes,13475276}, {processes_used,13455572}, {system,8383868}, {atom,560301}, {atom_used,557531}, {binary,174384}, {code,4570913}, {ets,1657452}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,3239}, {memory_data,{4284698624,4215156736,{<0.299.0>,6656756}}}, {disk_data, [{"C:\\",48162864,60},{"D:\\",51279476,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.5}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,95129600}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{3230397,0}}, {context_switches,{953003,0}}, {garbage_collection,{170255,1903600401,0}}, {io,{{input,99524602},{output,48415710}}}, {reductions,{779760708,5892097}}, {run_queue,0}, {runtime,{45411,405}}]}]},
 {'ns_1@10.2.1.101', [{last_heard,{1294,91227,423401}}, {active_buckets,["default"]}, {memory, [{total,19211232}, {processes,10853772}, {processes_used,10843708}, {system,8357460}, {atom,559813}, {atom_used,556363}, {binary,222800}, {code,4551541}, {ets,1610212}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {inets,"5.2"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,3169}, {memory_data,{4284698624,4227395584,{<11993.387.0>,5385512}}}, {disk_data, [{"C:\\",46243100,46},{"D:\\",51809624,0},{"G:\\",33929248,18}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,82124800}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{3164984,4087}}, {context_switches,{562142,0}}, {garbage_collection,{126557,1224932787,0}}, {io,{{input,77922481},{output,38210746}}}, {reductions,{387376845,3157906}}, {run_queue,0}, {runtime,{30232,249}}]}]},
 {'ns_1@10.2.1.102', [{last_heard,{1294,91228,172400}}, {active_buckets,["default"]}, {memory, [{total,16686704}, {processes,8880924}, {processes_used,8867956}, {system,7805780}, {atom,541077}, {atom_used,528868}, {binary,332048}, {code,4280811}, {ets,1239284}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.4"}, {mnesia,"4.4.12"}, {kernel,"2.13.4"}, {sasl,"2.1.8"}, {ns_server,"1.6.5r"}, {menelaus,"1.6.5r"}, {stdlib,"1.16.4"}]}, {system_arch,"windows"}, {wall_clock,2158}, {memory_data,{4284698624,3350102016,{<10870.307.0>,4114268}}}, {disk_data, [{"C:\\",49423972,41},{"D:\\",52797620,0},{"G:\\",34724465,17}]}, {replication,[{"default",0.0}]}, {system_memory_data, [{total_memory,4284698624}, {free_memory,934453248}, {system_total_memory,4284698624}]}, {statistics, [{wall_clock,{2150318,0}}, {context_switches,{217091,0}}, {garbage_collection,{61502,310999433,0}}, {io,{{input,22481273},{output,19761468}}}, {reductions,{125926142,599235}}, {run_queue,0}, {runtime,{11856,78}}]}]}]

INFO REPORT <0.105.0> 2011-01-03 13:47:08
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.110.0> 2011-01-03 13:47:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11848.0>}

INFO REPORT <0.105.0> 2011-01-03 13:47:09
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.85.0> 2011-01-03 13:47:10
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.105.0> 2011-01-03 13:47:11
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:13
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:15
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:16
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:17
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.110.0> 2011-01-03 13:47:18
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11848.0>}

INFO REPORT <0.105.0> 2011-01-03 13:47:19
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:20
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:21
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:23
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:25
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

CRASH REPORT <0.11848.0> 2011-01-03 13:47:26
===============================================================================
Crashing process
  initial_call     {ns_janitor,cleanup,['Argument__1']}
  pid              <0.11848.0>
  registered_name  []
  error_info       {exit,{{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,52}, 30000]}}, [{gen_server,call,3}, {lists,foreach,2}, {ns_vbm_sup,start_replicas,4}, {lists,foreach,2}, {ns_vbm_sup,'-set_replicas/2-fun-1-',3}, {lists,foreach,2}, {proc_lib,init_p_do_apply,3}]}
  ancestors        [<0.110.0>,ns_server_sup,ns_server_cluster_sup,<0.60.0>]
  messages         []
  links            [<0.110.0>]
  dictionary       []
  trap_exit        false
  status           running
  heap_size        17711
  stack_size       24
  reductions       2391732

INFO REPORT <0.110.0> 2011-01-03 13:47:26
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default" with reason {{{badmatch, {error, timeout}}, [{mc_client_binary, cmd_binary_vocal_recv, 5}, {mc_client_binary, delete_vbucket, 2}, {ns_memcached, handle_call, 3}, {gen_server, handle_msg, 5}, {proc_lib, init_p_do_apply, 3}]}, {gen_server, call, [{'ns_memcached-default', 'ns_1@10.2.1.101'}, {delete_vbucket, 52}, 30000]}}

INFO REPORT <0.105.0> 2011-01-03 13:47:26
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:269: Waiting for "default" on ['ns_1@10.2.1.101']

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 44 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 45 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 46 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 47 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 48 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 49 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 50 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 51 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 52 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11911.0> 2011-01-03 13:47:27
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets [633,632,629,628,625,624,621,620,617,616,613,612,609,608,605,604,601,600,597,596,593,592,589,588,585,584,581,580,577,576,573,572,569,568,565,564,561,560,557,556,553,552,549,548,545,544,541,540,537,536,533,532,529,528,525,524,521,520,517,516,513,512,68,67,64,63,60,59,56,55,52,51,50,49,48,47,46,45,44,43,42,41,40,39,38,37,36,35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0] in bucket "default" from node 'ns_1@10.2.1.102' to node 'ns_1@10.2.1.101'

INFO REPORT <0.85.0> 2011-01-03 13:47:27
===============================================================================
Pulling config from: 'ns_1@10.2.1.102'

INFO REPORT <0.105.0> 2011-01-03 13:47:28
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.110.0> 2011-01-03 13:47:28
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11911.0>}

INFO REPORT <0.105.0> 2011-01-03 13:47:30
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.259.0> 2011-01-03 13:47:31
===============================================================================
ns_1@10.2.1.100:stats_collector:71: Stats for bucket "default":
auth_cmds 695
auth_errors 0
bucket_conns 4
bytes_read 6871626503
bytes_written 212993190
cas_badval 0
cas_hits 0
cas_misses 0
cmd_flush 0
cmd_get 0
cmd_set 4548601
conn_yields 22374
connection_structures 235
curr_connections 26
curr_items 2825780
curr_items_tot 4923082
daemon_connections 10
decr_hits 0
decr_misses 0
delete_hits 0
delete_misses 0
ep_bg_fetched 0
ep_commit_num 5625
ep_commit_time 0
ep_commit_time_total 1653
ep_data_age 144
ep_data_age_highwat 495
ep_db_cleaner_status complete
ep_db_strategy multiMTDB
ep_dbinit 81
ep_dbname c:/Program Files/Membase/Server/data/ns_1/default
ep_dbshards 4
ep_expired 0
ep_flush_duration 154
ep_flush_duration_highwat 297
ep_flush_duration_total 2166
ep_flush_preempts 0
ep_flusher_state running
ep_flusher_todo 0
ep_io_num_read 820134
ep_io_num_write 5193972
ep_io_read_bytes 1030112542
ep_io_write_bytes 5039901090
ep_item_begin_failed 0
ep_item_commit_failed 0
ep_item_flush_expired 0
ep_item_flush_failed 0
ep_kv_size 2013709110
ep_max_data_size 3426746368
ep_max_txn_size 1000
ep_mem_high_wat 2570059776
ep_mem_low_wat 2056047820
ep_min_data_age 0
ep_num_active_non_resident 1088646
ep_num_eject_failures 2148726
ep_num_eject_replicas 1250498
ep_num_expiry_pager_runs 0
ep_num_non_resident 2729951
ep_num_not_my_vbuckets 854355
ep_num_pager_runs 7
ep_num_value_ejects 2920982
ep_oom_errors 0
ep_overhead 39340111
ep_pending_ops 0
ep_pending_ops_max 0
ep_pending_ops_max_duration 0
ep_pending_ops_total 0
ep_queue_age_cap 900
ep_queue_size 47831
ep_storage_age 381
ep_storage_age_highwat 492
ep_storage_type featured
ep_store_max_concurrency 10
ep_store_max_readers 9
ep_store_max_readwrite 1
ep_tap_bg_fetch_requeued 0
ep_tap_bg_fetched 813878
ep_tap_keepalive 0
ep_tmp_oom_errors 0
ep_too_old 0
ep_too_young 0
ep_total_cache_size 5353338790
ep_total_del_items 0
ep_total_enqueued 5242165
ep_total_new_items 5175508
ep_total_persisted 5193971
ep_vbucket_del 512
ep_vbucket_del_avg_walltime 118096
ep_vbucket_del_fail 0
ep_vbucket_del_max_walltime 140400
ep_vbucket_del_total_walltime 60465600
ep_version 1.6.2
ep_warmed_up 0
ep_warmup true
ep_warmup_dups 0
ep_warmup_oom 0
ep_warmup_thread complete
ep_warmup_time 31200
get_hits 0
get_misses 0
incr_hits 0
incr_misses 0
libevent 2.0.7-rc
limit_maxbytes 67108864
mem_used 2053049221
pid 2160
pointer_size 64
rejected_conns 0
tap_connect_received 689
tap_mutation_received 2841443
tap_mutation_sent 3746779
tap_opaque_received 2332
tap_opaque_sent 2832
tap_vbucket_set_sent 1367
threads 4
time 1294091251
total_connections 1409
uptime 3265
version 1.4.4_304_g7d5a132

INFO REPORT <0.85.0> 2011-01-03 13:47:35
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.110.0> 2011-01-03 13:47:38
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11911.0>}

INFO REPORT <0.105.0> 2011-01-03 13:47:38
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:41
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:44
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:47
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.110.0> 2011-01-03 13:47:48
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11911.0>}

INFO REPORT <0.105.0> 2011-01-03 13:47:53
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:47:55
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.110.0> 2011-01-03 13:47:58
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11911.0>}

CRASH REPORT <0.11911.0> 2011-01-03 13:47:59
===============================================================================
Crashing process
  initial_call     {ns_janitor,cleanup,['Argument__1']}
  pid              <0.11911.0>
  registered_name  []
  error_info       {exit,{{{badmatch,{error,timeout}}, [{mc_client_binary,cmd_binary_vocal_recv,5}, {mc_client_binary,delete_vbucket,2}, {ns_memcached,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, {gen_server,call, [{'ns_memcached-default','ns_1@10.2.1.101'}, {delete_vbucket,43}, 30000]}}, [{gen_server,call,3}, {lists,foreach,2}, {ns_vbm_sup,start_replicas,4}, {lists,foreach,2}, {ns_vbm_sup,'-set_replicas/2-fun-1-',3}, {lists,foreach,2}, {proc_lib,init_p_do_apply,3}]}
  ancestors        [<0.110.0>,ns_server_sup,ns_server_cluster_sup,<0.60.0>]
  messages         []
  links            [<0.110.0>]
  dictionary       []
  trap_exit        false
  status           running
  heap_size        17711
  stack_size       24
  reductions       2384777

INFO REPORT <0.110.0> 2011-01-03 13:47:59
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:174: Janitor run completed for bucket "default" with reason {{{badmatch, {error, timeout}}, [{mc_client_binary, cmd_binary_vocal_recv, 5}, {mc_client_binary, delete_vbucket, 2}, {ns_memcached, handle_call, 3}, {gen_server, handle_msg, 5}, {proc_lib, init_p_do_apply, 3}]}, {gen_server, call, [{'ns_memcached-default', 'ns_1@10.2.1.101'}, {delete_vbucket, 43}, 30000]}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 0 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 1 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 2 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 3 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 4 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 5 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
=============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 6 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 7 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 8 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 9 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 10 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 11 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 12 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 13 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 14 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 15 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 16 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 17 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: 
Killing replicators for vbucket 18 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 19 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 20 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 21 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 22 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 23 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 24 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 25 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 26 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 27 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 28 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 29 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}} INFO REPORT <0.11992.0> 2011-01-03 13:48:00 =============================================================================== ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 30 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, 

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 31 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 32 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 33 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 34 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 35 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 36 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 37 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 38 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 39 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 40 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 41 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 42 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_janitor:193: Killing replicators for vbucket 43 on master 'ns_1@10.2.1.102' because of {{'ns_1@10.2.1.102', active}, {'ns_1@10.2.1.101', dead}}

INFO REPORT <0.11992.0> 2011-01-03 13:48:00
===============================================================================
ns_1@10.2.1.100:ns_vbm_sup:260: Starting replicator for vbuckets [633,632,629,628,625,624,621,620,617,616,613,612,609,608,605,604,601,600,597,596,593,592,589,588,585,584,581,580,577,576,573,572,569,568,565,564,561,560,557,556,553,552,549,548,545,544,541,540,537,536,533,532,529,528,525,524,521,520,517,516,513,512,68,67,64,63,60,59,56,55,52,51,50,49,48,47,46,45,44,43,42,41,40,39,38,37,36,35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0] in bucket "default" from node 'ns_1@10.2.1.102' to node 'ns_1@10.2.1.101'

INFO REPORT <0.105.0> 2011-01-03 13:48:03
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:48:05
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:48:06
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.93.0> 2011-01-03 13:48:08
===============================================================================
ns_1@10.2.1.100:ns_doctor:82: Current node statuses:
[{'ns_1@10.2.1.100',
  [{last_heard,{1294,91287,764400}},
   {active_buckets,["default"]},
   {memory,[{total,22125376},{processes,13685396},{processes_used,13666276},{system,8439980},{atom,560789},{atom_used,558343},{binary,181288},{code,4582580},{ets,1693668}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},{kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,3299},
   {memory_data,{4284698624,4184846336,{<0.299.0>,6656756}}},
   {disk_data,[{"C:\\",48162864,60},{"D:\\",51279476,0},{"G:\\",34724465,17}]},
   {replication,[{"default",0.5}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,92594176},{system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{3290395,0}},{context_switches,{966801,0}},{garbage_collection,{172672,1926558401,0}},{io,{{input,101363188},{output,50652200}}},{reductions,{792350692,811632}},{run_queue,0},{runtime,{46067,32}}]}]},
 {'ns_1@10.2.1.101',
  [{last_heard,{1294,91287,748402}},
   {active_buckets,["default"]},
   {memory,[{total,19416288},{processes,11039004},{processes_used,11028940},{system,8377284},{atom,559813},{atom_used,556363},{binary,246232},{code,4551541},{ets,1606372}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{inets,"5.2"},{kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,3229},
   {memory_data,{4284698624,4210270208,{<11993.387.0>,5385512}}},
   {disk_data,[{"C:\\",46243100,46},{"D:\\",51809624,0},{"G:\\",33929248,18}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,87326720},{system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{3222470,1575}},{context_switches,{571544,0}},
                {garbage_collection,{128134,1245695829,0}},{io,{{input,79617831},{output,38541094}}},{reductions,{396374397,3168310}},{run_queue,0},{runtime,{30981,265}}]}]},
 {'ns_1@10.2.1.102',
  [{last_heard,{1294,91288,170400}},
   {active_buckets,["default"]},
   {memory,[{total,16913040},{processes,8815924},{processes_used,8802956},{system,8097116},{atom,541565},{atom_used,529955},{binary,576704},{code,4290459},{ets,1275748}]},
   {cluster_compatibility_version,1},
   {version,[{os_mon,"2.2.4"},{mnesia,"4.4.12"},{kernel,"2.13.4"},{sasl,"2.1.8"},{ns_server,"1.6.5r"},{menelaus,"1.6.5r"},{stdlib,"1.16.4"}]},
   {system_arch,"windows"},
   {wall_clock,2218},
   {memory_data,{4284698624,3351322624,{<10870.307.0>,4114268}}},
   {disk_data,[{"C:\\",49423972,41},{"D:\\",52797620,0},{"G:\\",34724465,17}]},
   {replication,[{"default",0.0}]},
   {system_memory_data,[{total_memory,4284698624},{free_memory,933093376},{system_total_memory,4284698624}]},
   {statistics,[{wall_clock,{2210316,0}},{context_switches,{223166,0}},{garbage_collection,{63145,320518908,0}},{io,{{input,23485359},{output,21305805}}},{reductions,{129761022,609670}},{run_queue,0},{runtime,{12058,46}}]}]}]

INFO REPORT <0.110.0> 2011-01-03 13:48:08
===============================================================================
ns_1@10.2.1.100:ns_orchestrator:164: Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.11992.0>}

INFO REPORT <0.105.0> 2011-01-03 13:48:08
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.85.0> 2011-01-03 13:48:10
===============================================================================
Pulling config from: 'ns_1@10.2.1.101'

INFO REPORT <0.105.0> 2011-01-03 13:48:10
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs

INFO REPORT <0.105.0> 2011-01-03 13:48:12
===============================================================================
memcached<0.105.0>: Suspend eq_tapq:anon_689 for 1.00 secs